diff --git a/.github/workflows/invoke_test_runner.yml b/.github/workflows/invoke_test_runner.yml index f30e105394..16b6e15c61 100644 --- a/.github/workflows/invoke_test_runner.yml +++ b/.github/workflows/invoke_test_runner.yml @@ -35,7 +35,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: '14' - run: npm install --prefix .github/workflows @@ -50,7 +50,7 @@ jobs: - id: 'auth' name: 'Authenticate to Google Cloud' - uses: google-github-actions/auth@v1.1.1 + uses: google-github-actions/auth@v2.1.2 with: credentials_json: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 975b85c197..e0a2801043 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -14,9 +14,9 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/wrapper-validation-action@v3 - name: Set up JDK 11 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '11' distribution: 'corretto' diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index b361a48d8f..5e90decd21 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -16,9 +16,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/wrapper-validation-action@v3 - name: Set up JDK 11 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '11' distribution: 'corretto' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0d1b866047..21b31d32ee 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,9 +18,9 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/wrapper-validation-action@v3 - name: Set up JDK 11 
- uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '11' distribution: 'corretto' diff --git a/.github/workflows/stale-pr-issue.yml b/.github/workflows/stale-pr-issue.yml new file mode 100644 index 0000000000..f36da60e2a --- /dev/null +++ b/.github/workflows/stale-pr-issue.yml @@ -0,0 +1,46 @@ +# Mark inactive issues and PRs as stale +# GitHub action based on https://github.com/actions/stale + +name: 'Close stale issues and PRs' +on: + schedule: + # Execute every day + - cron: '0 0 * * *' + +permissions: + issues: write + pull-requests: write + +jobs: + close-pending: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + # GLOBAL ------------------------------------------------------------ + # Exempt any PRs or issues already added to a milestone + exempt-all-milestones: true + # Days until issues or pull requests are labelled as stale + days-before-stale: 60 + + # ISSUES ------------------------------------------------------------ + # Issues will be closed after 90 days of inactivity (60 to mark as stale + 30 to close) + days-before-issue-close: 30 + stale-issue-message: > + Hello, this issue has been inactive for 60 days, so we're marking it as stale. + If you would like to continue this discussion, please comment within the next 30 days or we'll close the issue. + close-issue-message: > + Hello, as this issue has been inactive for 90 days, we're closing the issue. + If you would like to resume the discussion, please create a new issue. + exempt-issue-labels: keep-open + + # PULL REQUESTS ----------------------------------------------------- + # PRs will be closed after 90 days of inactivity (60 to mark as stale + 30 to close) + days-before-pr-close: 30 + stale-pr-message: > + Hello, this pull request has been inactive for 60 days, so we're marking it as stale. + If you would like to continue working on this pull request, please make an update within the next 30 days, or we'll close the pull request. 
+ close-pr-message: > + Hello, as this pull request has been inactive for 90 days, we're closing this pull request. + We always welcome contributions, and if you would like to continue, please open a new pull request. + exempt-pr-labels: keep-open diff --git a/README.md b/README.md index 5a68fa5ba7..e065da305d 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,8 @@ Discuss and ask questions in our Discussions: https://github.com/graphql-java/gr This is a [GraphQL](https://github.com/graphql/graphql-spec) Java implementation. +Latest build in Maven central: https://repo1.maven.org/maven2/com/graphql-java/graphql-java/ + [![Build](https://github.com/graphql-java/graphql-java/actions/workflows/master.yml/badge.svg)](https://github.com/graphql-java/graphql-java/actions/workflows/master.yml) [![Latest Release](https://img.shields.io/maven-central/v/com.graphql-java/graphql-java?versionPrefix=21.)](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/) [![Latest Snapshot](https://img.shields.io/maven-central/v/com.graphql-java/graphql-java?label=maven-central%20snapshot&versionPrefix=0)](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/) @@ -11,6 +13,8 @@ This is a [GraphQL](https://github.com/graphql/graphql-spec) Java implementation ### Documentation +The GraphQL Java book, from the maintainers: [GraphQL with Java and Spring](https://leanpub.com/graphql-java/) + See our tutorial for beginners: [Getting started with GraphQL Java and Spring Boot](https://www.graphql-java.com/tutorials/getting-started-with-spring-boot/) For further details, please see the documentation: https://www.graphql-java.com/documentation/getting-started diff --git a/additionallicenses/APACHE-LICENSE-2.0.txt b/additionallicenses/APACHE-LICENSE-2.0.txt new file mode 100644 index 0000000000..7a4a3ea242 --- /dev/null +++ b/additionallicenses/APACHE-LICENSE-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + 
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/agent-test/build.gradle b/agent-test/build.gradle new file mode 100644 index 0000000000..280aed1bac --- /dev/null +++ b/agent-test/build.gradle @@ -0,0 +1,57 @@ +plugins { + id 'java' +} + +dependencies { + implementation(rootProject) + implementation("net.bytebuddy:byte-buddy-agent:1.14.13") + + testImplementation 'org.junit.jupiter:junit-jupiter:5.10.2' + testRuntimeOnly 'org.junit.platform:junit-platform-launcher' + + testImplementation("org.assertj:assertj-core:3.25.3") + +} + +java { + toolchain { + languageVersion = JavaLanguageVersion.of(11) + } +} + +tasks.named('test', Test) { + dependsOn(':agent:shadowJar') + useJUnitPlatform() + + maxHeapSize = '4G' + + testLogging { + events "passed" + } +} + + +repositories { + mavenCentral() + mavenLocal() +} + + +java { + toolchain { + languageVersion = JavaLanguageVersion.of(11) + } +} + + +jar { + manifest { + attributes( + 'Agent-Class': 'graphql.agent.GraphQLJavaAgent', + 'Can-Redefine-Classes': 'true', + 'Can-Retransform-Classes': 'true', + 'Premain-Class': 'graphql.agent.GraphQLJavaAgent' + ) + } +} + diff --git a/agent-test/src/main/java/graphql/GraphQLApp.java b/agent-test/src/main/java/graphql/GraphQLApp.java new file mode 100644 index 0000000000..9d12bfdb15 --- /dev/null +++ b/agent-test/src/main/java/graphql/GraphQLApp.java @@ -0,0 +1,40 @@ +package graphql; + +import graphql.agent.result.ExecutionTrackingResult; +import graphql.schema.GraphQLSchema; +import graphql.schema.idl.RuntimeWiring; +import graphql.schema.idl.SchemaGenerator; +import graphql.schema.idl.SchemaParser; +import graphql.schema.idl.TypeDefinitionRegistry; + +/** + * Used for testing loading the agent on startup. 
+ * See StartAgentOnStartupTest + */ +public class GraphQLApp { + + public static void main(String[] args) { + String schema = "type Query { hello: String }"; + TypeDefinitionRegistry typeDefinitionRegistry = new SchemaParser().parse(schema); + RuntimeWiring runtimeWiring = RuntimeWiring.newRuntimeWiring() + .type("Query", builder -> builder.dataFetcher("hello", environment -> "world")) + .build(); + GraphQLSchema graphQLSchema = new SchemaGenerator().makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); + GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build(); + ExecutionInput executionInput = ExecutionInput.newExecutionInput().query("{ hello alias: hello alias2: hello }").build(); + GraphQLContext graphQLContext = executionInput.getGraphQLContext(); + ExecutionResult executionResult = graphQL.execute(executionInput); + System.out.println(executionResult.getData().toString()); + ExecutionTrackingResult executionTrackingResult = graphQLContext.get(ExecutionTrackingResult.EXECUTION_TRACKING_KEY); + if (executionTrackingResult == null) { + System.out.println("No tracking data found"); + System.exit(1); + } + if (executionTrackingResult.timePerPath.size() != 3) { + System.out.println("Expected 3 paths, got " + executionTrackingResult.timePerPath.size()); + System.exit(1); + } + System.out.println("Successfully tracked execution"); + System.exit(0); + } +} diff --git a/agent-test/src/test/java/graphql/test/AgentTest.java b/agent-test/src/test/java/graphql/test/AgentTest.java new file mode 100644 index 0000000000..a0add765c2 --- /dev/null +++ b/agent-test/src/test/java/graphql/test/AgentTest.java @@ -0,0 +1,72 @@ +package graphql.test; + +import graphql.agent.result.ExecutionTrackingResult; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static org.assertj.core.api.Assertions.assertThat; + +public 
class AgentTest { + + @BeforeAll + static void init() { + LoadAgent.loadIntoCurrentJVM(); + } + + @AfterAll + static void cleanup() { + } + + @Test + void test() { + ExecutionTrackingResult executionTrackingResult = TestQuery.executeQuery(); + assertThat(executionTrackingResult.dataFetcherCount()).isEqualTo(5); + assertThat(executionTrackingResult.getTime("/issues")).isGreaterThan(100); + assertThat(executionTrackingResult.getDfResultTypes("/issues")) + .isEqualTo(ExecutionTrackingResult.DFResultType.DONE_OK); + + verifyAgentDataIsEmpty(); + + } + + @Test + void testBatchLoader() { + ExecutionTrackingResult executionTrackingResult = TestQuery.executeBatchedQuery(); + assertThat(executionTrackingResult.dataFetcherCount()).isEqualTo(9); + assertThat(executionTrackingResult.getTime("/issues")).isGreaterThan(100); + assertThat(executionTrackingResult.getDfResultTypes("/issues[0]/author")) + .isEqualTo(ExecutionTrackingResult.DFResultType.PENDING); + assertThat(executionTrackingResult.getDfResultTypes("/issues[1]/author")) + .isEqualTo(ExecutionTrackingResult.DFResultType.PENDING); + + assertThat(executionTrackingResult.getDataLoaderNames()).isEqualTo(Collections.singletonList("userLoader")); + + assertThat(executionTrackingResult.dataLoaderNameToBatchCall).hasSize(1); + List userLoaderCalls = executionTrackingResult.dataLoaderNameToBatchCall.get("userLoader"); + assertThat(userLoaderCalls).hasSize(1); + ExecutionTrackingResult.BatchLoadingCall batchLoadingCall = userLoaderCalls.get(0); + + assertThat(batchLoadingCall.keyCount).isEqualTo(2); + + verifyAgentDataIsEmpty(); + } + + private void verifyAgentDataIsEmpty() { + try { + Class agent = Class.forName("graphql.agent.GraphQLJavaAgent"); + Map executionIdToData = (Map) agent.getField("executionIdToData").get(null); + Map dataLoaderToExecutionId = (Map) agent.getField("dataLoaderToExecutionId").get(null); + assertThat(executionIdToData).isEmpty(); + assertThat(dataLoaderToExecutionId).isEmpty(); + + } catch (Exception 
e) { + throw new RuntimeException(e); + } + + } +} diff --git a/agent-test/src/test/java/graphql/test/LoadAgent.java b/agent-test/src/test/java/graphql/test/LoadAgent.java new file mode 100644 index 0000000000..90ec46a078 --- /dev/null +++ b/agent-test/src/test/java/graphql/test/LoadAgent.java @@ -0,0 +1,15 @@ +package graphql.test; + +import net.bytebuddy.agent.ByteBuddyAgent; + +import java.io.File; + + +public class LoadAgent { + + + public static void loadIntoCurrentJVM() { + ByteBuddyAgent.attach(new File("../agent/build/libs/agent.jar"), String.valueOf(ProcessHandle.current().pid())); + } + +} diff --git a/agent-test/src/test/java/graphql/test/StartAgentOnStartupTest.java b/agent-test/src/test/java/graphql/test/StartAgentOnStartupTest.java new file mode 100644 index 0000000000..e4c8c3add6 --- /dev/null +++ b/agent-test/src/test/java/graphql/test/StartAgentOnStartupTest.java @@ -0,0 +1,23 @@ +package graphql.test; + +import org.junit.jupiter.api.Test; + +import java.io.IOException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class StartAgentOnStartupTest { + + + @Test + void testAgentCanBeLoadedAtStartup() throws IOException, InterruptedException { + // we use the classpath of the current test + String classPath = System.getProperty("java.class.path"); + ProcessBuilder processBuilder = new ProcessBuilder("java", "-javaagent:../agent/build/libs/agent.jar", "-classpath", classPath, "graphql.GraphQLApp"); + Process process = processBuilder.start(); + process.getErrorStream().transferTo(System.err); + process.getInputStream().transferTo(System.out); + int i = process.waitFor(); + assertThat(i).isZero(); + } +} diff --git a/agent-test/src/test/java/graphql/test/TestQuery.java b/agent-test/src/test/java/graphql/test/TestQuery.java new file mode 100644 index 0000000000..2755cf230f --- /dev/null +++ b/agent-test/src/test/java/graphql/test/TestQuery.java @@ -0,0 +1,102 @@ +package graphql.test; + +import graphql.ExecutionInput; +import 
graphql.ExecutionResult; +import graphql.GraphQL; +import graphql.agent.result.ExecutionTrackingResult; +import graphql.schema.DataFetcher; +import graphql.schema.GraphQLSchema; +import graphql.schema.idl.RuntimeWiring; +import graphql.schema.idl.SchemaGenerator; +import graphql.schema.idl.SchemaParser; +import graphql.schema.idl.TypeDefinitionRegistry; +import org.assertj.core.api.Assertions; +import org.dataloader.BatchLoader; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderFactory; +import org.dataloader.DataLoaderRegistry; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +public class TestQuery { + + + static ExecutionTrackingResult executeQuery() { + String sdl = "type Query{issues: [Issue]} type Issue {id: ID, title: String}"; + TypeDefinitionRegistry typeDefinitionRegistry = new SchemaParser().parse(sdl); + DataFetcher issuesDF = (env) -> { + return List.of( + Map.of("id", "1", "title", "issue-1"), + Map.of("id", "2", "title", "issue-2")); + }; + + RuntimeWiring runtimeWiring = RuntimeWiring.newRuntimeWiring() + .type("Query", builder -> builder.dataFetcher("issues", issuesDF)) + .build(); + GraphQLSchema graphQLSchema = new SchemaGenerator().makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); + + GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build(); + + ExecutionInput executionInput = ExecutionInput.newExecutionInput().query("{issues{id title}}").build(); + ExecutionResult result = graphQL.execute(executionInput); + Assertions.assertThat(result.getErrors()).isEmpty(); + ExecutionTrackingResult trackingResult = executionInput.getGraphQLContext().get(ExecutionTrackingResult.EXECUTION_TRACKING_KEY); + return trackingResult; + } + + static ExecutionTrackingResult executeBatchedQuery() { + String sdl = "type Query{issues: [Issue]} " + + "type Issue {id: ID, author: User}" + + "type User {id: ID, name: String}"; + + DataFetcher issuesDF = (env) -> List.of( + Map.of("id", "1", 
"title", "issue-1", "authorId", "user-1"), + Map.of("id", "2", "title", "issue-2", "authorId", "user-2")); + + BatchLoader userBatchLoader = keys -> { + // System.out.println("batch users with keys: " + keys); + return CompletableFuture.supplyAsync(() -> { + try { + Thread.sleep(100); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + return List.of( + Map.of("id", "user-1", "name", "Foo-1"), + Map.of("id", "user-2", "name", "Foo-2") + ); + }); + }; + DataLoaderRegistry dataLoaderRegistry = new DataLoaderRegistry(); + dataLoaderRegistry.register("userLoader", DataLoaderFactory.newDataLoader(userBatchLoader)); + + DataFetcher> authorDF = (env) -> { + DataLoader userLoader = env.getDataLoader("userLoader"); + // System.out.println("author id: " + (String) ((Map) env.getSource()).get("authorId")); + return userLoader.load((String) ((Map) env.getSource()).get("authorId")); + }; + TypeDefinitionRegistry typeDefinitionRegistry = new SchemaParser().parse(sdl); + + RuntimeWiring runtimeWiring = RuntimeWiring.newRuntimeWiring() + .type("Query", builder -> builder.dataFetcher("issues", issuesDF)) + .type("Issue", builder -> builder.dataFetcher("author", authorDF)) + .build(); + GraphQLSchema graphQLSchema = new SchemaGenerator().makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); + + GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build(); + String query = "{issues" + + "{id author {id name}}" + + "}"; + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .dataLoaderRegistry(dataLoaderRegistry) + .query(query).build(); + ExecutionResult result = graphQL.execute(executionInput); + Assertions.assertThat(result.getErrors()).isEmpty(); + ExecutionTrackingResult trackingResult = executionInput.getGraphQLContext().get(ExecutionTrackingResult.EXECUTION_TRACKING_KEY); + return trackingResult; + } + + +} diff --git a/agent/build.gradle b/agent/build.gradle new file mode 100644 index 0000000000..1cc23ecb3c --- /dev/null +++ 
b/agent/build.gradle @@ -0,0 +1,124 @@ +plugins { + id 'java' + id 'java-library' + id 'maven-publish' + id "com.github.johnrengelman.shadow" version "8.1.1" +} + +dependencies { + implementation("net.bytebuddy:byte-buddy:1.14.13") + // graphql-java itself + implementation(rootProject) +} + +repositories { + mavenCentral() + mavenLocal() +} + + +java { + toolchain { + languageVersion = JavaLanguageVersion.of(11) + } +} + +shadowJar { + minimize() + archiveClassifier.set('') + configurations = [project.configurations.compileClasspath] + dependencies { + exclude(dependency(rootProject)) + } + manifest { + attributes( + 'Agent-Class': 'graphql.agent.GraphQLJavaAgent', + 'Premain-Class': 'graphql.agent.GraphQLJavaAgent', + 'Can-Redefine-Classes': 'true', + 'Can-Retransform-Classes': 'true', + ) + } +} + +task sourcesJar(type: Jar) { + dependsOn classes + archiveClassifier = 'sources' + from sourceSets.main.allSource +} + +task javadocJar(type: Jar, dependsOn: javadoc) { + archiveClassifier = 'javadoc' + from javadoc.destinationDir +} + +publishing { + + publications { + + agent(MavenPublication) { + version rootProject.version + group rootProject.group + artifactId 'graphql-java-agent' + from components.java + + artifact sourcesJar { + archiveClassifier = "sources" + } + artifact javadocJar { + archiveClassifier = "javadoc" + } + pom.withXml { + // removing the shaded dependencies from the pom + def pomNode = asNode() + pomNode.dependencies.'*'.findAll() { + it.artifactId.text() == 'graphql-java' || it.artifactId.text() == 'byte-buddy' + }.each() { + it.parent().remove(it) + } + pomNode.children().last() + { + resolveStrategy = Closure.DELEGATE_FIRST + name 'graphql-java-agent' + description 'GraphqL Java Agent' + url "https://github.com/graphql-java/graphql-java" + scm { + url "https://github.com/graphql-java/graphql-java" + connection "https://github.com/graphql-java/graphql-java" + developerConnection "https://github.com/graphql-java/graphql-java" + } + licenses { + 
license { + name 'MIT' + url 'https://github.com/graphql-java/graphql-java/blob/master/LICENSE.md' + distribution 'repo' + } + } + developers { + developer { + id 'andimarek' + name 'Andreas Marek' + } + } + } + } + } + } +} + +signing { + required { !project.hasProperty('publishToMavenLocal') } + def signingKey = System.env.MAVEN_CENTRAL_PGP_KEY + useInMemoryPgpKeys(signingKey, "") + sign publishing.publications +} + + +// all publish tasks depend on the build task +tasks.withType(PublishToMavenRepository) { + dependsOn build +} + +// Only publish Maven POM, disable default Gradle modules file +tasks.withType(GenerateModuleMetadata) { + enabled = false +} + diff --git a/agent/src/main/java/graphql/agent/GraphQLJavaAgent.java b/agent/src/main/java/graphql/agent/GraphQLJavaAgent.java new file mode 100644 index 0000000000..1dd9f1fe42 --- /dev/null +++ b/agent/src/main/java/graphql/agent/GraphQLJavaAgent.java @@ -0,0 +1,300 @@ +package graphql.agent; + +import graphql.agent.result.ExecutionTrackingResult; +import graphql.execution.ExecutionContext; +import graphql.execution.ExecutionId; +import graphql.execution.ExecutionStrategyParameters; +import graphql.execution.ResultPath; +import graphql.schema.DataFetchingEnvironment; +import net.bytebuddy.agent.builder.AgentBuilder; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.implementation.bytecode.assign.Assigner; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; +import org.dataloader.DispatchResult; + +import java.lang.instrument.Instrumentation; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.BiConsumer; + +import static graphql.agent.result.ExecutionTrackingResult.DFResultType.DONE_CANCELLED; +import static graphql.agent.result.ExecutionTrackingResult.DFResultType.DONE_EXCEPTIONALLY; +import static 
graphql.agent.result.ExecutionTrackingResult.DFResultType.DONE_OK; +import static graphql.agent.result.ExecutionTrackingResult.DFResultType.PENDING; +import static graphql.agent.result.ExecutionTrackingResult.EXECUTION_TRACKING_KEY; +import static net.bytebuddy.matcher.ElementMatchers.nameMatches; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +public class GraphQLJavaAgent { + + + public static final Map executionIdToData = new ConcurrentHashMap<>(); + public static final Map dataLoaderToExecutionId = new ConcurrentHashMap<>(); + + public static void premain(String agentArgs, Instrumentation inst) { + agentmain(agentArgs, inst); + } + + + public static void agentmain(String agentArgs, Instrumentation inst) { + System.out.println("GraphQL Java Agent is starting"); + new AgentBuilder.Default() + .type(named("graphql.execution.Execution")) + .transform((builder, typeDescription, classLoader, module, protectionDomain) -> { + return builder + .visit(Advice.to(ExecutionAdvice.class).on(nameMatches("executeOperation"))); + + }) + .type(named("graphql.execution.ExecutionStrategy")) + .transform((builder, typeDescription, classLoader, module, protectionDomain) -> { + return builder + .visit(Advice.to(DataFetcherInvokeAdvice.class).on(nameMatches("invokeDataFetcher"))); + }) + .type(named("org.dataloader.DataLoaderRegistry")) + .transform((builder, typeDescription, classLoader, module, protectionDomain) -> { + return builder + .visit(Advice.to(DataLoaderRegistryAdvice.class).on(nameMatches("dispatchAll"))); + }) + .type(named("org.dataloader.DataLoader")) + .transform((builder, typeDescription, classLoader, module, protectionDomain) -> { + return builder + .visit(Advice.to(DataLoaderLoadAdvice.class).on(nameMatches("load"))); + }) + .type(named("org.dataloader.DataLoaderHelper")) + .transform((builder, typeDescription, classLoader, module, protectionDomain) -> { + return builder + 
.visit(Advice.to(DataLoaderHelperDispatchAdvice.class).on(nameMatches("dispatch"))) + .visit(Advice.to(DataLoaderHelperInvokeBatchLoaderAdvice.class) + .on(nameMatches("invokeLoader").and(takesArguments(List.class, List.class)))); + }) + .type(named("graphql.schema.DataFetchingEnvironmentImpl")) + .transform((builder, typeDescription, classLoader, module, protectionDomain) -> { + return builder + .visit(Advice.to(DataFetchingEnvironmentAdvice.class).on(nameMatches("getDataLoader"))); + }) + .disableClassFormatChanges() + .installOn(inst); + + } + + public static class ExecutionAdvice { + + public static class AfterExecutionHandler implements BiConsumer { + + private final ExecutionContext executionContext; + + public AfterExecutionHandler(ExecutionContext executionContext) { + this.executionContext = executionContext; + } + + public void accept(Object o, Throwable throwable) { + ExecutionId executionId = executionContext.getExecutionId(); + ExecutionTrackingResult executionTrackingResult = GraphQLJavaAgent.executionIdToData.get(executionId); + executionTrackingResult.endExecutionTime.set(System.nanoTime()); + executionTrackingResult.endThread.set(Thread.currentThread().getName()); + executionContext.getGraphQLContext().put(EXECUTION_TRACKING_KEY, executionTrackingResult); + // cleanup + for (DataLoader dataLoader : executionTrackingResult.dataLoaderToName.keySet()) { + dataLoaderToExecutionId.remove(dataLoader); + } + executionIdToData.remove(executionId); + + } + + } + + + @Advice.OnMethodEnter + public static void executeOperationEnter(@Advice.Argument(0) ExecutionContext executionContext) { + ExecutionTrackingResult executionTrackingResult = new ExecutionTrackingResult(); + executionTrackingResult.startExecutionTime.set(System.nanoTime()); + executionTrackingResult.startThread.set(Thread.currentThread().getName()); + executionContext.getGraphQLContext().put(EXECUTION_TRACKING_KEY, new ExecutionTrackingResult()); + + 
GraphQLJavaAgent.executionIdToData.put(executionContext.getExecutionId(), executionTrackingResult); + + DataLoaderRegistry dataLoaderRegistry = executionContext.getDataLoaderRegistry(); + for (String name : dataLoaderRegistry.getDataLoadersMap().keySet()) { + DataLoader dataLoader = dataLoaderRegistry.getDataLoader(name); + GraphQLJavaAgent.dataLoaderToExecutionId.put(dataLoader, executionContext.getExecutionId()); + executionTrackingResult.dataLoaderToName.put(dataLoader, name); + } + } + + @Advice.OnMethodExit + public static void executeOperationExit(@Advice.Argument(0) ExecutionContext executionContext, + @Advice.Return(typing = Assigner.Typing.DYNAMIC) CompletableFuture result) { + + result.whenComplete(new AfterExecutionHandler(executionContext)); + } + } + + public static class DataFetcherInvokeAdvice { + + public static class DataFetcherFinishedHandler implements BiConsumer { + + private final ExecutionContext executionContext; + private final ExecutionStrategyParameters parameters; + private final long startTime; + + public DataFetcherFinishedHandler(ExecutionContext executionContext, ExecutionStrategyParameters parameters, long startTime) { + this.executionContext = executionContext; + this.parameters = parameters; + this.startTime = startTime; + } + + @Override + public void accept(Object o, Throwable throwable) { + ExecutionId executionId = executionContext.getExecutionId(); + ExecutionTrackingResult executionTrackingResult = GraphQLJavaAgent.executionIdToData.get(executionId); + ResultPath path = parameters.getPath(); + executionTrackingResult.finishedTimePerPath.put(path, System.nanoTime() - startTime); + executionTrackingResult.finishedThreadPerPath.put(path, Thread.currentThread().getName()); + } + } + + @Advice.OnMethodEnter + public static void invokeDataFetcherEnter(@Advice.Argument(0) ExecutionContext executionContext, + @Advice.Argument(1) ExecutionStrategyParameters parameters) { + ExecutionTrackingResult executionTrackingResult = 
GraphQLJavaAgent.executionIdToData.get(executionContext.getExecutionId()); + executionTrackingResult.start(parameters.getPath(), System.nanoTime()); + executionTrackingResult.startInvocationThreadPerPath.put(parameters.getPath(), Thread.currentThread().getName()); + } + + @Advice.OnMethodExit + public static void invokeDataFetcherExit(@Advice.Argument(0) ExecutionContext executionContext, + @Advice.Argument(1) ExecutionStrategyParameters parameters, + @Advice.Return(readOnly = false) Object cfOrObject) { + // ExecutionTrackingResult executionTrackingResult = executionContext.getGraphQLContext().get(EXECUTION_TRACKING_KEY); + ExecutionTrackingResult executionTrackingResult = GraphQLJavaAgent.executionIdToData.get(executionContext.getExecutionId()); + ResultPath path = parameters.getPath(); + long startTime = executionTrackingResult.timePerPath.get(path); + executionTrackingResult.end(path, System.nanoTime()); + if (cfOrObject instanceof CompletableFuture) { + CompletableFuture result = (CompletableFuture) cfOrObject; + if (result.isDone()) { + if (result.isCancelled()) { + executionTrackingResult.setDfResultTypes(path, DONE_CANCELLED); + } else if (result.isCompletedExceptionally()) { + executionTrackingResult.setDfResultTypes(path, DONE_EXCEPTIONALLY); + } else { + executionTrackingResult.setDfResultTypes(path, DONE_OK); + } + } else { + executionTrackingResult.setDfResultTypes(path, PENDING); + } + // overriding the result to make sure the finished handler is called first when the DF is finished + // otherwise it is a completion tree instead of chain + cfOrObject = result.whenComplete(new DataFetcherFinishedHandler(executionContext, parameters, startTime)); + } else { + // materialized value - not a CF + executionTrackingResult.setDfResultTypes(path, DONE_OK); + new DataFetcherFinishedHandler(executionContext, parameters, startTime).accept(cfOrObject, null); + } + } + + } + + + public static class DataLoaderHelperInvokeBatchLoaderAdvice { + + @Advice.OnMethodEnter 
+ public static void invokeLoader(@Advice.Argument(0) List keys, + @Advice.Argument(1) List keysContext, + @Advice.This(typing = Assigner.Typing.DYNAMIC) Object dataLoaderHelper) { + DataLoader dataLoader = getDataLoaderForHelper(dataLoaderHelper); + ExecutionId executionId = GraphQLJavaAgent.dataLoaderToExecutionId.get(dataLoader); + ExecutionTrackingResult executionTrackingResult = GraphQLJavaAgent.executionIdToData.get(executionId); + String dataLoaderName = executionTrackingResult.dataLoaderToName.get(dataLoader); + + synchronized (executionTrackingResult.dataLoaderNameToBatchCall) { + executionTrackingResult.dataLoaderNameToBatchCall.putIfAbsent(dataLoaderName, new ArrayList<>()); + executionTrackingResult.dataLoaderNameToBatchCall.get(dataLoaderName) + .add(new ExecutionTrackingResult.BatchLoadingCall(keys.size(), Thread.currentThread().getName())); + } + + } + } + + public static class DataLoaderHelperDispatchAdvice { + + @Advice.OnMethodExit + public static void dispatch(@Advice.This(typing = Assigner.Typing.DYNAMIC) Object dataLoaderHelper, + @Advice.Return(typing = Assigner.Typing.DYNAMIC) DispatchResult dispatchResult) { + try { + // System.out.println("dataloader helper Dispatch " + dataLoaderHelper + " load for execution " + dispatchResult); + // DataLoader dataLoader = getDataLoaderForHelper(dataLoaderHelper); + // // System.out.println("dataLoader: " + dataLoader); + // ExecutionId executionId = GraphQLJavaAgent.dataLoaderToExecutionId.get(dataLoader); + // ExecutionTrackingResult ExecutionTrackingResult = GraphQLJavaAgent.executionIdToData.get(executionId); + // String dataLoaderName = ExecutionTrackingResult.dataLoaderToName.get(dataLoader); + // + // ExecutionTrackingResult.dataLoaderNameToBatchCall.putIfAbsent(dataLoaderName, new ArrayList<>()); + // ExecutionTrackingResult.dataLoaderNameToBatchCall.get(dataLoaderName).add(new ExecutionTrackingResult.BatchLoadingCall(dispatchResult.getKeysCount())); + + } catch (Exception e) { + 
e.printStackTrace(); + } + + } + + } + + public static DataLoader getDataLoaderForHelper(Object dataLoaderHelper) { + try { + Field field = dataLoaderHelper.getClass().getDeclaredField("dataLoader"); + field.setAccessible(true); + return (DataLoader) field.get(dataLoaderHelper); + } catch (Exception e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + +} + +class DataFetchingEnvironmentAdvice { + + + @Advice.OnMethodExit + public static void getDataLoader(@Advice.Argument(0) String dataLoaderName, + @Advice.This(typing = Assigner.Typing.DYNAMIC) DataFetchingEnvironment dataFetchingEnvironment, + @Advice.Return(readOnly = false, typing = Assigner.Typing.DYNAMIC) DataLoader dataLoader) { + ExecutionTrackingResult executionTrackingResult = GraphQLJavaAgent.executionIdToData.get(dataFetchingEnvironment.getExecutionId()); + ResultPath resultPath = dataFetchingEnvironment.getExecutionStepInfo().getPath(); + executionTrackingResult.resultPathToDataLoaderUsed.put(resultPath, dataLoaderName); + + } + +} + + +class DataLoaderLoadAdvice { + + @Advice.OnMethodEnter + public static void load(@Advice.This(typing = Assigner.Typing.DYNAMIC) Object dataLoader) { + ExecutionId executionId = GraphQLJavaAgent.dataLoaderToExecutionId.get(dataLoader); + String dataLoaderName = GraphQLJavaAgent.executionIdToData.get(executionId).dataLoaderToName.get(dataLoader); + } + +} + +class DataLoaderRegistryAdvice { + + @Advice.OnMethodEnter + public static void dispatchAll(@Advice.This(typing = Assigner.Typing.DYNAMIC) Object dataLoaderRegistry) { + List> dataLoaders = ((DataLoaderRegistry) dataLoaderRegistry).getDataLoaders(); + ExecutionId executionId = GraphQLJavaAgent.dataLoaderToExecutionId.get(dataLoaders.get(0)); + } + +} + + + diff --git a/build.gradle b/build.gradle index e416a40faa..3750da10cb 100644 --- a/build.gradle +++ b/build.gradle @@ -9,9 +9,9 @@ plugins { id 'signing' id "com.github.johnrengelman.shadow" version "8.1.1" id "biz.aQute.bnd.builder" version 
"6.4.0" - id "io.github.gradle-nexus.publish-plugin" version "1.3.0" + id "io.github.gradle-nexus.publish-plugin" version "2.0.0" id "groovy" - id "me.champeau.jmh" version "0.7.1" + id "me.champeau.jmh" version "0.7.2" } java { @@ -37,6 +37,7 @@ def getDevelopmentVersion() { return makeDevelopmentVersion(["0.0.0", dateTime, "no-git"]) } + // a default Github Action env variable set to 'true' def isCi = Boolean.parseBoolean(System.env.CI) if (isCi) { def gitHashOutput = new StringBuilder() @@ -62,7 +63,6 @@ def getDevelopmentVersion() { } def reactiveStreamsVersion = '1.0.3' -def slf4jVersion = '2.0.7' def releaseVersion = System.env.RELEASE_VERSION def antlrVersion = '4.11.1' // https://mvnrepository.com/artifact/org.antlr/antlr4-runtime def guavaVersion = '32.1.2-jre' @@ -99,28 +99,26 @@ jar { } dependencies { - compileOnly 'org.jetbrains:annotations:24.0.1' + compileOnly 'org.jetbrains:annotations:24.1.0' implementation 'org.antlr:antlr4-runtime:' + antlrVersion - implementation 'org.slf4j:slf4j-api:' + slf4jVersion - api 'com.graphql-java:java-dataloader:3.2.1' + api 'com.graphql-java:java-dataloader:3.3.0' api 'org.reactivestreams:reactive-streams:' + reactiveStreamsVersion antlr 'org.antlr:antlr4:' + antlrVersion implementation 'com.google.guava:guava:' + guavaVersion testImplementation group: 'junit', name: 'junit', version: '4.13.2' testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0' - testImplementation 'org.codehaus.groovy:groovy:3.0.19' - testImplementation 'org.codehaus.groovy:groovy-json:3.0.19' + testImplementation 'org.codehaus.groovy:groovy:3.0.21' + testImplementation 'org.codehaus.groovy:groovy-json:3.0.21' testImplementation 'com.google.code.gson:gson:2.10.1' - testImplementation 'org.eclipse.jetty:jetty-server:11.0.15' - testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.15.3' - testImplementation 'org.slf4j:slf4j-simple:' + slf4jVersion + testImplementation 'org.eclipse.jetty:jetty-server:11.0.20' + 
testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.17.0' testImplementation 'org.awaitility:awaitility-groovy:4.2.0' testImplementation 'com.github.javafaker:javafaker:1.0.2' testImplementation 'org.reactivestreams:reactive-streams-tck:' + reactiveStreamsVersion testImplementation "io.reactivex.rxjava2:rxjava:2.2.21" - testImplementation 'org.testng:testng:7.8.0' // use for reactive streams test inheritance + testImplementation 'org.testng:testng:7.10.1' // use for reactive streams test inheritance testImplementation 'org.openjdk.jmh:jmh-core:1.37' testAnnotationProcessor 'org.openjdk.jmh:jmh-generator-annprocess:1.37' @@ -350,8 +348,8 @@ nexusPublishing { } } -// to publish to local maven repo skip signing: ./gradlew publishToMavenLocal -x signGraphqlJavaPublication signing { + required { !project.hasProperty('publishToMavenLocal') } def signingKey = System.env.MAVEN_CENTRAL_PGP_KEY useInMemoryPgpKeys(signingKey, "") sign publishing.publications diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index ccebba7710..d64cd49177 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index bdc9a83b1e..a80b22ce5c 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 79a61d421c..1aa94a4269 100755 --- a/gradlew +++ b/gradlew @@ -83,10 +83,8 @@ done # This is normally unused # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} -APP_HOME=$( cd "${APP_HOME:-./}" && 
pwd -P ) || exit - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -133,10 +131,13 @@ location of your Java installation." fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." + fi fi # Increase the maximum file descriptors if we can. @@ -144,7 +145,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC3045 + # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac @@ -152,7 +153,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then '' | soft) :;; #( *) # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC3045 + # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -197,11 +198,15 @@ if "$cygwin" || "$msys" ; then done fi -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. 
+ +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ diff --git a/settings.gradle b/settings.gradle index 160b054c14..e337acd34b 100644 --- a/settings.gradle +++ b/settings.gradle @@ -14,3 +14,4 @@ pluginManagement { } rootProject.name = 'graphql-java' +include("agent", "agent-test") diff --git a/src/main/java/graphql/Assert.java b/src/main/java/graphql/Assert.java index 5fcbf9dbe9..872e19c05e 100644 --- a/src/main/java/graphql/Assert.java +++ b/src/main/java/graphql/Assert.java @@ -10,63 +10,92 @@ @Internal public class Assert { + public static T assertNotNullWithNPE(T object, Supplier msg) { + if (object != null) { + return object; + } + throw new NullPointerException(msg.get()); + } + + public static T assertNotNull(T object) { + if (object != null) { + return object; + } + return throwAssert("Object required to be not null"); + } + public static T assertNotNull(T object, Supplier msg) { if (object != null) { return object; } - throw new AssertException(msg.get()); + return throwAssert(msg.get()); } - public static T assertNotNullWithNPE(T object, Supplier msg) { + public static T assertNotNull(T object, String constantMsg) { if (object != null) { return object; } - throw new NullPointerException(msg.get()); + return throwAssert(constantMsg); } - public static T assertNotNull(T object) { + public static T assertNotNull(T object, String msgFmt, Object arg1) { if (object != null) { return object; } - throw new AssertException("Object required to be 
not null"); + return throwAssert(msgFmt, arg1); } + public static T assertNotNull(T object, String msgFmt, Object arg1, Object arg2) { + if (object != null) { + return object; + } + return throwAssert(msgFmt, arg1, arg2); + } + + public static T assertNotNull(T object, String msgFmt, Object arg1, Object arg2, Object arg3) { + if (object != null) { + return object; + } + return throwAssert(msgFmt, arg1, arg2, arg3); + } + + public static void assertNull(T object, Supplier msg) { if (object == null) { return; } - throw new AssertException(msg.get()); + throwAssert(msg.get()); } public static void assertNull(T object) { if (object == null) { return; } - throw new AssertException("Object required to be null"); + throwAssert("Object required to be null"); } public static T assertNeverCalled() { - throw new AssertException("Should never been called"); + return throwAssert("Should never been called"); } public static T assertShouldNeverHappen(String format, Object... args) { - throw new AssertException("Internal error: should never happen: " + format(format, args)); + return throwAssert("Internal error: should never happen: %s", format(format, args)); } public static T assertShouldNeverHappen() { - throw new AssertException("Internal error: should never happen"); + return throwAssert("Internal error: should never happen"); } public static Collection assertNotEmpty(Collection collection) { if (collection == null || collection.isEmpty()) { - throw new AssertException("collection must be not null and not empty"); + throwAssert("collection must be not null and not empty"); } return collection; } public static Collection assertNotEmpty(Collection collection, Supplier msg) { if (collection == null || collection.isEmpty()) { - throw new AssertException(msg.get()); + throwAssert(msg.get()); } return collection; } @@ -75,28 +104,84 @@ public static void assertTrue(boolean condition, Supplier msg) { if (condition) { return; } - throw new AssertException(msg.get()); + 
throwAssert(msg.get()); } public static void assertTrue(boolean condition) { if (condition) { return; } - throw new AssertException("condition expected to be true"); + throwAssert("condition expected to be true"); + } + + public static void assertTrue(boolean condition, String constantMsg) { + if (condition) { + return; + } + throwAssert(constantMsg); + } + + public static void assertTrue(boolean condition, String msgFmt, Object arg1) { + if (condition) { + return; + } + throwAssert(msgFmt, arg1); + } + + public static void assertTrue(boolean condition, String msgFmt, Object arg1, Object arg2) { + if (condition) { + return; + } + throwAssert(msgFmt, arg1, arg2); + } + + public static void assertTrue(boolean condition, String msgFmt, Object arg1, Object arg2, Object arg3) { + if (condition) { + return; + } + throwAssert(msgFmt, arg1, arg2, arg3); } public static void assertFalse(boolean condition, Supplier msg) { if (!condition) { return; } - throw new AssertException(msg.get()); + throwAssert(msg.get()); } public static void assertFalse(boolean condition) { if (!condition) { return; } - throw new AssertException("condition expected to be false"); + throwAssert("condition expected to be false"); + } + + public static void assertFalse(boolean condition, String constantMsg) { + if (!condition) { + return; + } + throwAssert(constantMsg); + } + + public static void assertFalse(boolean condition, String msgFmt, Object arg1) { + if (!condition) { + return; + } + throwAssert(msgFmt, arg1); + } + + public static void assertFalse(boolean condition, String msgFmt, Object arg1, Object arg2) { + if (!condition) { + return; + } + throwAssert(msgFmt, arg1, arg2); + } + + public static void assertFalse(boolean condition, String msgFmt, Object arg1, Object arg2, Object arg3) { + if (!condition) { + return; + } + throwAssert(msgFmt, arg1, arg2, arg3); } private static final String invalidNameErrorMessage = "Name must be non-null, non-empty and match [_A-Za-z][_0-9A-Za-z]* - was 
'%s'"; @@ -108,13 +193,17 @@ public static void assertFalse(boolean condition) { * currently non null, non empty, * * @param name - the name to be validated. + * * @return the name if valid, or AssertException if invalid. */ public static String assertValidName(String name) { if (name != null && !name.isEmpty() && validNamePattern.matcher(name).matches()) { return name; } - throw new AssertException(String.format(invalidNameErrorMessage, name)); + return throwAssert(invalidNameErrorMessage, name); } + private static T throwAssert(String format, Object... args) { + throw new AssertException(format(format, args)); + } } diff --git a/src/main/java/graphql/DeprecatedAt.java b/src/main/java/graphql/DeprecatedAt.java deleted file mode 100644 index 0918e5f6d6..0000000000 --- a/src/main/java/graphql/DeprecatedAt.java +++ /dev/null @@ -1,22 +0,0 @@ -package graphql; - -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.CONSTRUCTOR; -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.METHOD; -import static java.lang.annotation.ElementType.PACKAGE; -import static java.lang.annotation.ElementType.TYPE; - -/** - * Helper to track deprecation - * - * Please use ISO-8601 format i.e. 
YYYY-MM-DD - */ -@Retention(RetentionPolicy.SOURCE) -@Target(value = {CONSTRUCTOR, METHOD, TYPE, FIELD, PACKAGE}) -public @interface DeprecatedAt { - String value(); -} diff --git a/src/main/java/graphql/Directives.java b/src/main/java/graphql/Directives.java index 50e570e14a..8e0c81661e 100644 --- a/src/main/java/graphql/Directives.java +++ b/src/main/java/graphql/Directives.java @@ -1,6 +1,7 @@ package graphql; +import graphql.language.BooleanValue; import graphql.language.Description; import graphql.language.DirectiveDefinition; import graphql.language.StringValue; @@ -33,6 +34,7 @@ public class Directives { private static final String SPECIFIED_BY = "specifiedBy"; private static final String DEPRECATED = "deprecated"; private static final String ONE_OF = "oneOf"; + private static final String DEFER = "defer"; public static final String NO_LONGER_SUPPORTED = "No longer supported"; public static final DirectiveDefinition DEPRECATED_DIRECTIVE_DEFINITION; @@ -40,7 +42,6 @@ public class Directives { @ExperimentalApi public static final DirectiveDefinition ONE_OF_DIRECTIVE_DEFINITION; - static { DEPRECATED_DIRECTIVE_DEFINITION = DirectiveDefinition.newDirectiveDefinition() .name(DEPRECATED) @@ -77,6 +78,34 @@ public class Directives { .build(); } + /** + * The @defer directive can be used to defer sending data for a fragment until later in the query. + * This is an opt-in directive that is not available unless it is explicitly put into the schema. + *

+ * This implementation is based on the state of Defer/Stream PR + * More specifically at the state of this + * commit + *

+ * The execution behaviour should match what we get from running Apollo Server 4.9.5 with graphql-js v17.0.0-alpha.2 + */ + @ExperimentalApi + public static final GraphQLDirective DeferDirective = GraphQLDirective.newDirective() + .name(DEFER) + .description("This directive allows results to be deferred during execution") + .validLocations(FRAGMENT_SPREAD, INLINE_FRAGMENT) + .argument(newArgument() + .name("if") + .type(nonNull(GraphQLBoolean)) + .description("Deferred behaviour is controlled by this argument") + .defaultValueLiteral(BooleanValue.newBooleanValue(true).build()) + ) + .argument(newArgument() + .name("label") + .type(GraphQLString) + .description("A unique label that represents the fragment being deferred") + ) + .build(); + public static final GraphQLDirective IncludeDirective = GraphQLDirective.newDirective() .name("include") .description("Directs the executor to include this field or fragment only when the `if` argument is true") diff --git a/src/main/java/graphql/DirectivesUtil.java b/src/main/java/graphql/DirectivesUtil.java index 9a3e1fc3ac..e4a3d45d90 100644 --- a/src/main/java/graphql/DirectivesUtil.java +++ b/src/main/java/graphql/DirectivesUtil.java @@ -9,6 +9,7 @@ import graphql.util.FpKit; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; @@ -23,9 +24,7 @@ @Internal public class DirectivesUtil { - - @Deprecated // use GraphQLAppliedDirectives eventually - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") // use GraphQLAppliedDirectives eventually public static Map nonRepeatableDirectivesByName(List directives) { // filter the repeatable directives List singletonDirectives = directives.stream() @@ -34,15 +33,13 @@ public static Map nonRepeatableDirectivesByName(List> allDirectivesByName(List directives) { return ImmutableMap.copyOf(FpKit.groupingBy(directives, GraphQLDirective::getName)); } - @Deprecated // use GraphQLAppliedDirectives 
eventually - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") // use GraphQLAppliedDirectives eventually public static Optional directiveWithArg(List directives, String directiveName, String argumentName) { GraphQLDirective directive = nonRepeatableDirectivesByName(directives).get(directiveName); GraphQLArgument argument = null; @@ -52,9 +49,7 @@ public static Optional directiveWithArg(List return Optional.ofNullable(argument); } - - @Deprecated // use GraphQLAppliedDirectives eventually - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") // use GraphQLAppliedDirectives eventually public static boolean isAllNonRepeatable(List directives) { if (directives == null || directives.isEmpty()) { return false; @@ -67,8 +62,7 @@ public static boolean isAllNonRepeatable(List directives) { return true; } - @Deprecated // use GraphQLAppliedDirectives eventually - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") // use GraphQLAppliedDirectives eventually public static List add(List targetList, GraphQLDirective newDirective) { assertNotNull(targetList, () -> "directive list can't be null"); assertNotNull(newDirective, () -> "directive can't be null"); @@ -76,8 +70,7 @@ public static List add(List targetList, Grap return targetList; } - @Deprecated // use GraphQLAppliedDirectives eventually - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") // use GraphQLAppliedDirectives eventually public static List addAll(List targetList, List newDirectives) { assertNotNull(targetList, () -> "directive list can't be null"); assertNotNull(newDirectives, () -> "directive list can't be null"); @@ -85,8 +78,7 @@ public static List addAll(List targetList, L return targetList; } - @Deprecated // use GraphQLAppliedDirectives eventually - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") // use GraphQLAppliedDirectives eventually public static GraphQLDirective getFirstDirective(String name, Map> allDirectivesByName) { List 
directives = allDirectivesByName.getOrDefault(name, emptyList()); if (directives.isEmpty()) { @@ -100,7 +92,6 @@ public static GraphQLDirective getFirstDirective(String name, Map toAppliedDirectives(GraphQLDirectiveContainer directiveContainer) { @@ -113,7 +104,6 @@ public static List toAppliedDirectives(GraphQLDirective * * @param appliedDirectives the applied directives to use * @param directives the legacy directives to use - * * @return a combined list unique by name */ public static List toAppliedDirectives(Collection appliedDirectives, Collection directives) { @@ -136,6 +126,7 @@ public static List toAppliedDirectives(Collection> allDirectivesByName; private final ImmutableMap nonRepeatableDirectivesByName; @@ -154,7 +145,14 @@ public DirectivesHolder(Collection allDirectives, Collection directives, List appliedDirectives) { + if (directives.isEmpty() && appliedDirectives.isEmpty()) { + return EMPTY_HOLDER; + } + return new DirectivesHolder(directives, appliedDirectives); } public ImmutableMap> getAllDirectivesByName() { diff --git a/src/main/java/graphql/ExecutionInput.java b/src/main/java/graphql/ExecutionInput.java index f924aab7e4..18456034ca 100644 --- a/src/main/java/graphql/ExecutionInput.java +++ b/src/main/java/graphql/ExecutionInput.java @@ -3,7 +3,6 @@ import graphql.collect.ImmutableKit; import graphql.execution.ExecutionId; import graphql.execution.RawVariables; -import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentationState; import org.dataloader.DataLoaderRegistry; import java.util.Locale; @@ -12,6 +11,7 @@ import java.util.function.UnaryOperator; import static graphql.Assert.assertNotNull; +import static graphql.execution.instrumentation.dataloader.EmptyDataLoaderRegistryInstance.EMPTY_DATALOADER_REGISTRY; /** * This represents the series of values that can be input on a graphql query execution @@ -68,8 +68,7 @@ public String getOperationName() { * * @deprecated - use {@link #getGraphQLContext()} */ - @Deprecated 
- @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") public Object getContext() { return context; } @@ -214,7 +213,7 @@ public static class Builder { // this is important - it allows code to later known if we never really set a dataloader and hence it can optimize // dataloader field tracking away. // - private DataLoaderRegistry dataLoaderRegistry = DataLoaderDispatcherInstrumentationState.EMPTY_DATALOADER_REGISTRY; + private DataLoaderRegistry dataLoaderRegistry = EMPTY_DATALOADER_REGISTRY; private Locale locale = Locale.getDefault(); private ExecutionId executionId; @@ -273,47 +272,13 @@ public Builder localContext(Object localContext) { * * @deprecated - the {@link ExecutionInput#getGraphQLContext()} is a fixed mutable instance now */ - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") public Builder context(Object context) { this.context = context; return this; } - /** - * The legacy context object - * - * @param contextBuilder the context builder object to use - * - * @return this builder - * - * @deprecated - the {@link ExecutionInput#getGraphQLContext()} is a fixed mutable instance now - */ - @Deprecated - @DeprecatedAt("2021-07-05") - public Builder context(GraphQLContext.Builder contextBuilder) { - this.context = contextBuilder.build(); - return this; - } - - /** - * The legacy context object - * - * @param contextBuilderFunction the context builder function to use - * - * @return this builder - * - * @deprecated - the {@link ExecutionInput#getGraphQLContext()} is a fixed mutable instance now - */ - @Deprecated - @DeprecatedAt("2021-07-05") - public Builder context(UnaryOperator contextBuilderFunction) { - GraphQLContext.Builder builder = GraphQLContext.newContext(); - builder = contextBuilderFunction.apply(builder); - return context(builder.build()); - } - - /** + /** * This will give you a builder of {@link GraphQLContext} and any values you set will be copied * into the underlying {@link GraphQLContext} of this 
execution input * @@ -387,4 +352,4 @@ public ExecutionInput build() { return new ExecutionInput(this); } } -} \ No newline at end of file +} diff --git a/src/main/java/graphql/ExecutionResultImpl.java b/src/main/java/graphql/ExecutionResultImpl.java index 33ddd67e21..62419a63a7 100644 --- a/src/main/java/graphql/ExecutionResultImpl.java +++ b/src/main/java/graphql/ExecutionResultImpl.java @@ -40,6 +40,10 @@ public ExecutionResultImpl(ExecutionResultImpl other) { this(other.dataPresent, other.data, other.errors, other.extensions); } + public > ExecutionResultImpl(Builder builder) { + this(builder.dataPresent, builder.data, builder.errors, builder.extensions); + } + private ExecutionResultImpl(boolean dataPresent, Object data, List errors, Map extensions) { this.dataPresent = dataPresent; this.data = data; @@ -103,61 +107,61 @@ public String toString() { '}'; } - public static Builder newExecutionResult() { - return new Builder(); + public static > Builder newExecutionResult() { + return new Builder<>(); } - public static class Builder implements ExecutionResult.Builder { + public static class Builder> implements ExecutionResult.Builder { private boolean dataPresent; private Object data; private List errors = new ArrayList<>(); private Map extensions; @Override - public Builder from(ExecutionResult executionResult) { + public T from(ExecutionResult executionResult) { dataPresent = executionResult.isDataPresent(); data = executionResult.getData(); errors = new ArrayList<>(executionResult.getErrors()); extensions = executionResult.getExtensions(); - return this; + return (T) this; } @Override - public Builder data(Object data) { + public T data(Object data) { dataPresent = true; this.data = data; - return this; + return (T) this; } @Override - public Builder errors(List errors) { + public T errors(List errors) { this.errors = errors; - return this; + return (T) this; } @Override - public Builder addErrors(List errors) { + public T addErrors(List errors) { 
this.errors.addAll(errors); - return this; + return (T) this; } @Override - public Builder addError(GraphQLError error) { + public T addError(GraphQLError error) { this.errors.add(error); - return this; + return (T) this; } @Override - public Builder extensions(Map extensions) { + public T extensions(Map extensions) { this.extensions = extensions; - return this; + return (T) this; } @Override - public Builder addExtension(String key, Object value) { + public T addExtension(String key, Object value) { this.extensions = (this.extensions == null ? new LinkedHashMap<>() : this.extensions); this.extensions.put(key, value); - return this; + return (T) this; } @Override diff --git a/src/main/java/graphql/ExperimentalApi.java b/src/main/java/graphql/ExperimentalApi.java index c405ec10cf..80be253cd1 100644 --- a/src/main/java/graphql/ExperimentalApi.java +++ b/src/main/java/graphql/ExperimentalApi.java @@ -12,12 +12,16 @@ /** * This represents code that the graphql-java project considers experimental API and while our intention is that it will - * progress to be {@link PublicApi}, its existence, signature of behavior may change between releases. - * - * In general unnecessary changes will be avoided but you should not depend on experimental classes being stable + * progress to be {@link PublicApi}, its existence, signature or behavior may change between releases. + *

+ * In general unnecessary changes will be avoided, but you should not depend on experimental classes being stable. */ @Retention(RetentionPolicy.RUNTIME) @Target(value = {CONSTRUCTOR, METHOD, TYPE, FIELD}) @Documented public @interface ExperimentalApi { + /** + * The key that should be associated with a boolean value which indicates whether @defer and @stream behaviour is enabled for this execution. + */ + String ENABLE_INCREMENTAL_SUPPORT = "ENABLE_INCREMENTAL_SUPPORT"; } diff --git a/src/main/java/graphql/GraphQL.java b/src/main/java/graphql/GraphQL.java index 2ccb54b955..096a4f7e26 100644 --- a/src/main/java/graphql/GraphQL.java +++ b/src/main/java/graphql/GraphQL.java @@ -12,14 +12,11 @@ import graphql.execution.SimpleDataFetcherExceptionHandler; import graphql.execution.SubscriptionExecutionStrategy; import graphql.execution.ValueUnboxer; -import graphql.execution.instrumentation.ChainedInstrumentation; import graphql.execution.instrumentation.DocumentAndVariables; import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.InstrumentationContext; import graphql.execution.instrumentation.InstrumentationState; -import graphql.execution.instrumentation.NoContextChainedInstrumentation; import graphql.execution.instrumentation.SimplePerformantInstrumentation; -import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation; import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters; import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; import graphql.execution.instrumentation.parameters.InstrumentationValidationParameters; @@ -28,12 +25,8 @@ import graphql.execution.preparsed.PreparsedDocumentProvider; import graphql.language.Document; import graphql.schema.GraphQLSchema; -import graphql.util.LogKit; import graphql.validation.ValidationError; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.util.ArrayList; import 
java.util.List; import java.util.Locale; import java.util.Optional; @@ -91,9 +84,6 @@ @PublicApi public class GraphQL { - private static final Logger log = LoggerFactory.getLogger(GraphQL.class); - private static final Logger logNotSafe = LogKit.getNotPrivacySafeLogger(GraphQL.class); - private final GraphQLSchema graphQLSchema; private final ExecutionStrategy queryStrategy; private final ExecutionStrategy mutationStrategy; @@ -102,6 +92,7 @@ public class GraphQL { private final Instrumentation instrumentation; private final PreparsedDocumentProvider preparsedDocumentProvider; private final ValueUnboxer valueUnboxer; + private final boolean doNotAutomaticallyDispatchDataLoader; private GraphQL(Builder builder) { @@ -113,6 +104,7 @@ private GraphQL(Builder builder) { this.instrumentation = assertNotNull(builder.instrumentation, () -> "instrumentation must not be null"); this.preparsedDocumentProvider = assertNotNull(builder.preparsedDocumentProvider, () -> "preparsedDocumentProvider must be non null"); this.valueUnboxer = assertNotNull(builder.valueUnboxer, () -> "valueUnboxer must not be null"); + this.doNotAutomaticallyDispatchDataLoader = builder.doNotAutomaticallyDispatchDataLoader; } /** @@ -157,6 +149,10 @@ public Instrumentation getInstrumentation() { return instrumentation; } + public boolean isDoNotAutomaticallyDispatchDataLoader() { + return doNotAutomaticallyDispatchDataLoader; + } + /** * @return the PreparsedDocumentProvider for this {@link GraphQL} instance */ @@ -215,7 +211,7 @@ public static class Builder { private ExecutionIdProvider idProvider = DEFAULT_EXECUTION_ID_PROVIDER; private Instrumentation instrumentation = null; // deliberate default here private PreparsedDocumentProvider preparsedDocumentProvider = NoOpPreparsedDocumentProvider.INSTANCE; - private boolean doNotAddDefaultInstrumentations = false; + private boolean doNotAutomaticallyDispatchDataLoader = false; private ValueUnboxer valueUnboxer = ValueUnboxer.DEFAULT; @@ -271,20 +267,15 @@ 
public Builder executionIdProvider(ExecutionIdProvider executionIdProvider) { return this; } + /** - * For performance reasons you can opt into situation where the default instrumentations (such - * as {@link graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation} will not be - * automatically added into the graphql instance. - *

- * For most situations this is not needed unless you are really pushing the boundaries of performance - *

- * By default a certain graphql instrumentations will be added to the mix to more easily enable certain functionality. This - * allows you to stop this behavior + * Deactivates the automatic dispatching of DataLoaders. + * If deactivated the user is responsible for dispatching the DataLoaders manually. * * @return this builder */ - public Builder doNotAddDefaultInstrumentations() { - this.doNotAddDefaultInstrumentations = true; + public Builder doNotAutomaticallyDispatchDataLoader() { + this.doNotAutomaticallyDispatchDataLoader = true; return this; } @@ -305,7 +296,9 @@ public GraphQL build() { this.subscriptionExecutionStrategy = new SubscriptionExecutionStrategy(this.defaultExceptionHandler); } - this.instrumentation = checkInstrumentationDefaultState(this.instrumentation, this.doNotAddDefaultInstrumentations); + if (instrumentation == null) { + this.instrumentation = SimplePerformantInstrumentation.INSTANCE; + } return new GraphQL(this); } } @@ -419,28 +412,24 @@ public CompletableFuture executeAsync(UnaryOperator executeAsync(ExecutionInput executionInput) { - if (logNotSafe.isDebugEnabled()) { - logNotSafe.debug("Executing request. operation name: '{}'. query: '{}'. 
variables '{}'", executionInput.getOperationName(), executionInput.getQuery(), executionInput.getVariables()); - } ExecutionInput executionInputWithId = ensureInputHasId(executionInput); - CompletableFuture instrumentationStateCF = instrumentation.createStateAsync(new InstrumentationCreateStateParameters(this.graphQLSchema, executionInput)); + CompletableFuture instrumentationStateCF = instrumentation.createStateAsync(new InstrumentationCreateStateParameters(this.graphQLSchema, executionInputWithId)); return Async.orNullCompletedFuture(instrumentationStateCF).thenCompose(instrumentationState -> { try { - InstrumentationExecutionParameters inputInstrumentationParameters = new InstrumentationExecutionParameters(executionInputWithId, this.graphQLSchema, instrumentationState); + InstrumentationExecutionParameters inputInstrumentationParameters = new InstrumentationExecutionParameters(executionInputWithId, this.graphQLSchema); ExecutionInput instrumentedExecutionInput = instrumentation.instrumentExecutionInput(executionInputWithId, inputInstrumentationParameters, instrumentationState); - CompletableFuture beginExecutionCF = new CompletableFuture<>(); - InstrumentationExecutionParameters instrumentationParameters = new InstrumentationExecutionParameters(instrumentedExecutionInput, this.graphQLSchema, instrumentationState); + InstrumentationExecutionParameters instrumentationParameters = new InstrumentationExecutionParameters(instrumentedExecutionInput, this.graphQLSchema); InstrumentationContext executionInstrumentation = nonNullCtx(instrumentation.beginExecution(instrumentationParameters, instrumentationState)); - executionInstrumentation.onDispatched(beginExecutionCF); + executionInstrumentation.onDispatched(); GraphQLSchema graphQLSchema = instrumentation.instrumentSchema(this.graphQLSchema, instrumentationParameters, instrumentationState); CompletableFuture executionResult = parseValidateAndExecute(instrumentedExecutionInput, graphQLSchema, instrumentationState); // 
// finish up instrumentation - executionResult = executionResult.whenComplete(completeInstrumentationCtxCF(executionInstrumentation, beginExecutionCF)); + executionResult = executionResult.whenComplete(completeInstrumentationCtxCF(executionInstrumentation)); // // allow instrumentation to tweak the result executionResult = executionResult.thenCompose(result -> instrumentation.instrumentExecutionResult(result, instrumentationParameters, instrumentationState)); @@ -453,7 +442,7 @@ public CompletableFuture executeAsync(ExecutionInput executionI private CompletableFuture handleAbortException(ExecutionInput executionInput, InstrumentationState instrumentationState, AbortExecutionException abortException) { CompletableFuture executionResult = CompletableFuture.completedFuture(abortException.toExecutionResult()); - InstrumentationExecutionParameters instrumentationParameters = new InstrumentationExecutionParameters(executionInput, this.graphQLSchema, instrumentationState); + InstrumentationExecutionParameters instrumentationParameters = new InstrumentationExecutionParameters(executionInput, this.graphQLSchema); // // allow instrumentation to tweak the result executionResult = executionResult.thenCompose(result -> instrumentation.instrumentExecutionResult(result, instrumentationParameters, instrumentationState)); @@ -496,12 +485,8 @@ private PreparsedDocumentEntry parseAndValidate(AtomicReference ExecutionInput executionInput = executionInputRef.get(); String query = executionInput.getQuery(); - if (logNotSafe.isDebugEnabled()) { - logNotSafe.debug("Parsing query: '{}'...", query); - } ParseAndValidateResult parseResult = parse(executionInput, graphQLSchema, instrumentationState); if (parseResult.isFailure()) { - logNotSafe.warn("Query did not parse : '{}'", executionInput.getQuery()); return new PreparsedDocumentEntry(parseResult.getSyntaxException().toInvalidSyntaxError()); } else { final Document document = parseResult.getDocument(); @@ -509,12 +494,8 @@ private 
PreparsedDocumentEntry parseAndValidate(AtomicReference executionInput = executionInput.transform(builder -> builder.variables(parseResult.getVariables())); executionInputRef.set(executionInput); - if (logNotSafe.isDebugEnabled()) { - logNotSafe.debug("Validating query: '{}'", query); - } final List errors = validate(executionInput, document, graphQLSchema, instrumentationState); if (!errors.isEmpty()) { - logNotSafe.warn("Query did not validate : '{}'", query); return new PreparsedDocumentEntry(document, errors); } @@ -523,17 +504,15 @@ private PreparsedDocumentEntry parseAndValidate(AtomicReference } private ParseAndValidateResult parse(ExecutionInput executionInput, GraphQLSchema graphQLSchema, InstrumentationState instrumentationState) { - InstrumentationExecutionParameters parameters = new InstrumentationExecutionParameters(executionInput, graphQLSchema, instrumentationState); + InstrumentationExecutionParameters parameters = new InstrumentationExecutionParameters(executionInput, graphQLSchema); InstrumentationContext parseInstrumentationCtx = nonNullCtx(instrumentation.beginParse(parameters, instrumentationState)); - CompletableFuture documentCF = new CompletableFuture<>(); - parseInstrumentationCtx.onDispatched(documentCF); + parseInstrumentationCtx.onDispatched(); ParseAndValidateResult parseResult = ParseAndValidate.parse(executionInput); if (parseResult.isFailure()) { parseInstrumentationCtx.onCompleted(null, parseResult.getSyntaxException()); return parseResult; } else { - documentCF.complete(parseResult.getDocument()); parseInstrumentationCtx.onCompleted(parseResult.getDocument(), null); DocumentAndVariables documentAndVariables = parseResult.getDocumentAndVariables(); @@ -544,73 +523,27 @@ private ParseAndValidateResult parse(ExecutionInput executionInput, GraphQLSchem } private List validate(ExecutionInput executionInput, Document document, GraphQLSchema graphQLSchema, InstrumentationState instrumentationState) { - InstrumentationContext> 
validationCtx = nonNullCtx(instrumentation.beginValidation(new InstrumentationValidationParameters(executionInput, document, graphQLSchema, instrumentationState), instrumentationState)); - CompletableFuture> cf = new CompletableFuture<>(); - validationCtx.onDispatched(cf); + InstrumentationContext> validationCtx = nonNullCtx(instrumentation.beginValidation(new InstrumentationValidationParameters(executionInput, document, graphQLSchema), instrumentationState)); + validationCtx.onDispatched(); Predicate> validationRulePredicate = executionInput.getGraphQLContext().getOrDefault(ParseAndValidate.INTERNAL_VALIDATION_PREDICATE_HINT, r -> true); Locale locale = executionInput.getLocale() != null ? executionInput.getLocale() : Locale.getDefault(); List validationErrors = ParseAndValidate.validate(graphQLSchema, document, validationRulePredicate, locale); validationCtx.onCompleted(validationErrors, null); - cf.complete(validationErrors); return validationErrors; } - private CompletableFuture execute(ExecutionInput executionInput, Document document, GraphQLSchema graphQLSchema, InstrumentationState instrumentationState) { + private CompletableFuture execute(ExecutionInput executionInput, + Document document, + GraphQLSchema graphQLSchema, + InstrumentationState instrumentationState + ) { - Execution execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, valueUnboxer); + Execution execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, valueUnboxer, doNotAutomaticallyDispatchDataLoader); ExecutionId executionId = executionInput.getExecutionId(); - if (logNotSafe.isDebugEnabled()) { - logNotSafe.debug("Executing '{}'. operation name: '{}'. query: '{}'. 
variables '{}'", executionId, executionInput.getOperationName(), executionInput.getQuery(), executionInput.getVariables()); - } - CompletableFuture future = execution.execute(document, graphQLSchema, executionId, executionInput, instrumentationState); - future = future.whenComplete((result, throwable) -> { - if (throwable != null) { - logNotSafe.error(String.format("Execution '%s' threw exception when executing : query : '%s'. variables '%s'", executionId, executionInput.getQuery(), executionInput.getVariables()), throwable); - } else { - if (log.isDebugEnabled()) { - int errorCount = result.getErrors().size(); - if (errorCount > 0) { - log.debug("Execution '{}' completed with '{}' errors", executionId, errorCount); - } else { - log.debug("Execution '{}' completed with zero errors", executionId); - } - } - } - }); - return future; + return execution.execute(document, graphQLSchema, executionId, executionInput, instrumentationState); } - private static Instrumentation checkInstrumentationDefaultState(Instrumentation instrumentation, boolean doNotAddDefaultInstrumentations) { - if (doNotAddDefaultInstrumentations) { - return instrumentation == null ? SimplePerformantInstrumentation.INSTANCE : instrumentation; - } - if (instrumentation instanceof DataLoaderDispatcherInstrumentation) { - return instrumentation; - } - if (instrumentation instanceof NoContextChainedInstrumentation) { - return instrumentation; - } - if (instrumentation == null) { - return new DataLoaderDispatcherInstrumentation(); - } - - // - // if we don't have a DataLoaderDispatcherInstrumentation in play, we add one. We want DataLoader to be 1st class in graphql without requiring - // people to remember to wire it in. 
Later we may decide to have more default instrumentations but for now it's just the one - // - List instrumentationList = new ArrayList<>(); - if (instrumentation instanceof ChainedInstrumentation) { - instrumentationList.addAll(((ChainedInstrumentation) instrumentation).getInstrumentations()); - } else { - instrumentationList.add(instrumentation); - } - boolean containsDLInstrumentation = instrumentationList.stream().anyMatch(instr -> instr instanceof DataLoaderDispatcherInstrumentation); - if (!containsDLInstrumentation) { - instrumentationList.add(new DataLoaderDispatcherInstrumentation()); - } - return new ChainedInstrumentation(instrumentationList); - } } diff --git a/src/main/java/graphql/GraphQLContext.java b/src/main/java/graphql/GraphQLContext.java index 081c17725f..8b913919d3 100644 --- a/src/main/java/graphql/GraphQLContext.java +++ b/src/main/java/graphql/GraphQLContext.java @@ -5,7 +5,9 @@ import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.function.BiFunction; import java.util.function.Consumer; +import java.util.function.Function; import java.util.stream.Stream; import static graphql.Assert.assertNotNull; @@ -171,6 +173,52 @@ public GraphQLContext putAll(Consumer contextBuilderCons return putAll(builder); } + /** + * Attempts to compute a mapping for the specified key and its + * current mapped value (or null if there is no current mapping). 
+ * + * @param key key with which the specified value is to be associated + * @param remappingFunction the function to compute a value + * + * @return the new value associated with the specified key, or null if none + * @param for two + */ + public T compute(Object key, BiFunction remappingFunction) { + assertNotNull(remappingFunction); + return (T) map.compute(assertNotNull(key), (k, v) -> remappingFunction.apply(k, (T) v)); + } + + /** + * If the specified key is not already associated with a value (or is mapped to null), + * attempts to compute its value using the given mapping function and enters it into this map unless null. + * + * @param key key with which the specified value is to be associated + * @param mappingFunction the function to compute a value + * + * @return the current (existing or computed) value associated with the specified key, or null if the computed value is null + * @param for two + */ + + public T computeIfAbsent(Object key, Function mappingFunction) { + return (T) map.computeIfAbsent(assertNotNull(key), assertNotNull(mappingFunction)); + } + + /** + * If the value for the specified key is present and non-null, + * attempts to compute a new mapping given the key and its current mapped value. 
+ * + * @param key key with which the specified value is to be associated + * @param remappingFunction the function to compute a value + * + * @return the new value associated with the specified key, or null if none + * @param for two + */ + + public T computeIfPresent(Object key, BiFunction remappingFunction) { + assertNotNull(remappingFunction); + return (T) map.computeIfPresent(assertNotNull(key), (k, v) -> remappingFunction.apply(k, (T) v)); + } + /** * @return a stream of entries in the context */ diff --git a/src/main/java/graphql/GraphQLError.java b/src/main/java/graphql/GraphQLError.java index b4fc6fa600..c18752b14a 100644 --- a/src/main/java/graphql/GraphQLError.java +++ b/src/main/java/graphql/GraphQLError.java @@ -39,8 +39,10 @@ public interface GraphQLError extends Serializable { ErrorClassification getErrorType(); /** - * The graphql spec says that the (optional) path field of any error should be a list - * of path entries https://spec.graphql.org/October2021/#sec-Handling-Field-Errors + * The graphql spec says that the (optional) path field of any error must be + * a list of path entries starting at the root of the response + * and ending with the field associated with the error + * https://spec.graphql.org/draft/#sec-Errors.Error-Result-Format * * @return the path in list format */ diff --git a/src/main/java/graphql/TypeResolutionEnvironment.java b/src/main/java/graphql/TypeResolutionEnvironment.java index c606fdd5fe..ca57979bf5 100644 --- a/src/main/java/graphql/TypeResolutionEnvironment.java +++ b/src/main/java/graphql/TypeResolutionEnvironment.java @@ -94,8 +94,7 @@ public GraphQLSchema getSchema() { * * @deprecated use {@link #getGraphQLContext()} instead */ - @Deprecated - @DeprecatedAt("2021-12-27") + @Deprecated(since = "2021-12-27") public T getContext() { //noinspection unchecked return (T) context; diff --git a/src/main/java/graphql/agent/result/ExecutionTrackingResult.java b/src/main/java/graphql/agent/result/ExecutionTrackingResult.java 
new file mode 100644 index 0000000000..9896da2c1d --- /dev/null +++ b/src/main/java/graphql/agent/result/ExecutionTrackingResult.java @@ -0,0 +1,154 @@ +package graphql.agent.result; + +import graphql.PublicApi; +import graphql.execution.ResultPath; +import org.dataloader.DataLoader; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; + +import static graphql.agent.result.ExecutionTrackingResult.DFResultType.PENDING; + +/** + * This is the result of the agent tracking an execution. + * It can be found inside the GraphQLContext after the execution with the key {@link ExecutionTrackingResult#EXECUTION_TRACKING_KEY} + * + * Note: While this is public API, the main goal is temporary debugging to understand an execution better with minimal overhead. + * Therefore this will evolve over time if needed to be performant and reflect the overall execution. 
+ * It is not recommended to have the agent on always or to rely on this class during normal execution + */ +@PublicApi +public class ExecutionTrackingResult { + + public static final String EXECUTION_TRACKING_KEY = "__GJ_AGENT_EXECUTION_TRACKING"; + public final AtomicReference startThread = new AtomicReference<>(); + public final AtomicReference endThread = new AtomicReference<>(); + public final AtomicLong startExecutionTime = new AtomicLong(); + public final AtomicLong endExecutionTime = new AtomicLong(); + public final Map resultPathToDataLoaderUsed = new ConcurrentHashMap<>(); + public final Map dataLoaderToName = new ConcurrentHashMap<>(); + + public final Map timePerPath = new ConcurrentHashMap<>(); + public final Map finishedTimePerPath = new ConcurrentHashMap<>(); + public final Map finishedThreadPerPath = new ConcurrentHashMap<>(); + public final Map startInvocationThreadPerPath = new ConcurrentHashMap<>(); + private final Map dfResultTypes = new ConcurrentHashMap<>(); + public final Map> dataLoaderNameToBatchCall = new ConcurrentHashMap<>(); + + public static class BatchLoadingCall { + public BatchLoadingCall(int keyCount, String threadName) { + this.keyCount = keyCount; + this.threadName = threadName; + } + + public final int keyCount; + public final String threadName; + + } + + + public String print(String executionId) { + StringBuilder s = new StringBuilder(); + s.append("==========================").append("\n"); + s.append("Summary for execution with id ").append(executionId).append("\n"); + s.append("==========================").append("\n"); + s.append("Execution time in ms:").append((endExecutionTime.get() - startExecutionTime.get()) / 1_000_000L).append("\n"); + s.append("Fields count: ").append(timePerPath.keySet().size()).append("\n"); + s.append("Blocking fields count: ").append(dfResultTypes.values().stream().filter(dfResultType -> dfResultType != PENDING).count()).append("\n"); + s.append("Nonblocking fields count: 
").append(dfResultTypes.values().stream().filter(dfResultType -> dfResultType == PENDING).count()).append("\n"); + s.append("DataLoaders used: ").append(dataLoaderToName.size()).append("\n"); + s.append("DataLoader names: ").append(dataLoaderToName.values()).append("\n"); + s.append("start execution thread: '").append(startThread.get()).append("'\n"); + s.append("end execution thread: '").append(endThread.get()).append("'\n"); + s.append("BatchLoader calls details: ").append("\n"); + s.append("==========================").append("\n"); + for (String dataLoaderName : dataLoaderNameToBatchCall.keySet()) { + s.append("Batch call: '").append(dataLoaderName).append("' made ").append(dataLoaderNameToBatchCall.get(dataLoaderName).size()).append(" times, ").append("\n"); + for (BatchLoadingCall batchLoadingCall : dataLoaderNameToBatchCall.get(dataLoaderName)) { + s.append("Batch call with ").append(batchLoadingCall.keyCount).append(" keys ").append(" in thread ").append(batchLoadingCall.threadName).append("\n"); + } + List resultPathUsed = new ArrayList<>(); + for (ResultPath resultPath : resultPathToDataLoaderUsed.keySet()) { + if (resultPathToDataLoaderUsed.get(resultPath).equals(dataLoaderName)) { + resultPathUsed.add(resultPath); + } + } + s.append("DataLoader: '").append(dataLoaderName).append("' used in fields: ").append(resultPathUsed).append("\n"); + } + s.append("Field details:").append("\n"); + s.append("===============").append("\n"); + for (ResultPath path : timePerPath.keySet()) { + s.append("Field: '").append(path).append("'\n"); + s.append("invocation time: ").append(timePerPath.get(path)).append(" nano seconds, ").append("\n"); + s.append("completion time: ").append(finishedTimePerPath.get(path)).append(" nano seconds, ").append("\n"); + s.append("Result type: ").append(dfResultTypes.get(path)).append("\n"); + s.append("invoked in thread: ").append(startInvocationThreadPerPath.get(path)).append("\n"); + s.append("finished in thread: 
").append(finishedThreadPerPath.get(path)).append("\n"); + s.append("-------------\n"); + } + s.append("==========================").append("\n"); + s.append("==========================").append("\n"); + return s.toString(); + + } + + @Override + public String toString() { + return "ExecutionData{" + + "resultPathToDataLoaderUsed=" + resultPathToDataLoaderUsed + + ", dataLoaderNames=" + dataLoaderToName.values() + + ", timePerPath=" + timePerPath + + ", dfResultTypes=" + dfResultTypes + + '}'; + } + + public enum DFResultType { + DONE_OK, + DONE_EXCEPTIONALLY, + DONE_CANCELLED, + PENDING, + } + + public List getDataLoaderNames() { + return new ArrayList<>(dataLoaderToName.values()); + } + + + public void start(ResultPath path, long startTime) { + timePerPath.put(path, startTime); + } + + public void end(ResultPath path, long endTime) { + timePerPath.put(path, endTime - timePerPath.get(path)); + } + + public int dataFetcherCount() { + return timePerPath.size(); + } + + public long getTime(ResultPath path) { + return timePerPath.get(path); + } + + public long getTime(String path) { + return timePerPath.get(ResultPath.parse(path)); + } + + public void setDfResultTypes(ResultPath resultPath, DFResultType resultTypes) { + dfResultTypes.put(resultPath, resultTypes); + } + + public DFResultType getDfResultTypes(ResultPath resultPath) { + return dfResultTypes.get(resultPath); + } + + public DFResultType getDfResultTypes(String resultPath) { + return dfResultTypes.get(ResultPath.parse(resultPath)); + } + + +} diff --git a/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java b/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java index 87a00e976a..6e47e92bb0 100644 --- a/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java +++ b/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java @@ -12,10 +12,9 @@ import graphql.execution.instrumentation.parameters.InstrumentationValidationParameters; import 
graphql.validation.ValidationError; import org.jetbrains.annotations.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.util.List; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; @@ -32,8 +31,6 @@ @PublicApi public class MaxQueryComplexityInstrumentation extends SimplePerformantInstrumentation { - private static final Logger log = LoggerFactory.getLogger(MaxQueryComplexityInstrumentation.class); - private final int maxComplexity; private final FieldComplexityCalculator fieldComplexityCalculator; private final Function maxQueryComplexityExceededFunction; @@ -82,11 +79,10 @@ public MaxQueryComplexityInstrumentation(int maxComplexity, FieldComplexityCalcu } @Override - public InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return new State(); + public @Nullable CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { + return CompletableFuture.completedFuture(new State()); } - @Override public @Nullable InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters, InstrumentationState rawState) { State state = ofState(rawState); @@ -100,9 +96,6 @@ public InstrumentationState createState(InstrumentationCreateStateParameters par State state = ofState(rawState); QueryComplexityCalculator queryComplexityCalculator = newQueryComplexityCalculator(instrumentationExecuteOperationParameters.getExecutionContext()); int totalComplexity = queryComplexityCalculator.calculate(); - if (log.isDebugEnabled()) { - log.debug("Query complexity: {}", totalComplexity); - } if (totalComplexity > maxComplexity) { QueryComplexityInfo queryComplexityInfo = QueryComplexityInfo.newQueryComplexityInfo() .complexity(totalComplexity) diff --git a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java index 
972a6f8e9c..2a2b237aeb 100644 --- a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java +++ b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java @@ -36,7 +36,6 @@ import static graphql.Assert.assertNotNull; import static graphql.schema.GraphQLTypeUtil.unwrapAll; import static graphql.util.TraverserContext.Phase.LEAVE; -import static java.lang.String.format; /** * Internally used node visitor which delegates to a {@link QueryVisitor} with type @@ -142,8 +141,8 @@ public TraversalControl visitFragmentSpread(FragmentSpread fragmentSpread, Trave GraphQLCompositeType typeCondition = (GraphQLCompositeType) schema.getType(fragmentDefinition.getTypeCondition().getName()); assertNotNull(typeCondition, - () -> format("Invalid type condition '%s' in fragment '%s'", fragmentDefinition.getTypeCondition().getName(), - fragmentDefinition.getName())); + "Invalid type condition '%s' in fragment '%s'", fragmentDefinition.getTypeCondition().getName(), + fragmentDefinition.getName()); context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), fragmentDefinition, graphQLContext)); return TraversalControl.CONTINUE; } @@ -204,7 +203,7 @@ public TraversalControl visitArgument(Argument argument, TraverserContext QueryVisitorFieldEnvironment fieldEnv = fieldCtx.getEnvironment(); GraphQLFieldsContainer fieldsContainer = fieldEnv.getFieldsContainer(); - GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(schema, fieldsContainer, field.getName()); + GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDefinition(schema, fieldsContainer, field.getName()); GraphQLArgument graphQLArgument = fieldDefinition.getArgument(argument.getName()); String argumentName = graphQLArgument.getName(); diff --git a/src/main/java/graphql/collect/ImmutableMapWithNullValues.java b/src/main/java/graphql/collect/ImmutableMapWithNullValues.java index 8eab40c56d..e9ca664efb 100644 --- 
a/src/main/java/graphql/collect/ImmutableMapWithNullValues.java +++ b/src/main/java/graphql/collect/ImmutableMapWithNullValues.java @@ -1,7 +1,6 @@ package graphql.collect; import graphql.Assert; -import graphql.DeprecatedAt; import graphql.Internal; import java.util.Collection; @@ -82,29 +81,25 @@ public V get(Object key) { } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public V put(K key, V value) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public V remove(Object key) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public void putAll(Map m) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public void clear() { throw new UnsupportedOperationException(); } @@ -145,64 +140,55 @@ public void forEach(BiConsumer action) { } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public void replaceAll(BiFunction function) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public V putIfAbsent(K key, V value) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public boolean remove(Object key, Object value) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public boolean replace(K key, V oldValue, V newValue) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public V replace(K key, V value) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") 
+ @Deprecated(since = "2020-11-10") public V computeIfAbsent(K key, Function mappingFunction) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public V computeIfPresent(K key, BiFunction remappingFunction) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public V compute(K key, BiFunction remappingFunction) { throw new UnsupportedOperationException(); } @Override - @Deprecated - @DeprecatedAt("2020-11-10") + @Deprecated(since = "2020-11-10") public V merge(K key, V value, BiFunction remappingFunction) { throw new UnsupportedOperationException(); } diff --git a/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java b/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java index 577bb00f96..863e0d6fad 100644 --- a/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java +++ b/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java @@ -22,18 +22,17 @@ public AbstractAsyncExecutionStrategy(DataFetcherExceptionHandler dataFetcherExc super(dataFetcherExceptionHandler); } - // This method is kept for backward compatibility. 
Prefer calling/overriding another handleResults method - protected BiConsumer, Throwable> handleResults(ExecutionContext executionContext, List fieldNames, CompletableFuture overallResult) { - return (List results, Throwable exception) -> { + protected BiConsumer, Throwable> handleResults(ExecutionContext executionContext, List fieldNames, CompletableFuture overallResult) { + return (List results, Throwable exception) -> { if (exception != null) { handleNonNullException(executionContext, overallResult, exception); return; } Map resolvedValuesByField = Maps.newLinkedHashMapWithExpectedSize(fieldNames.size()); int ix = 0; - for (ExecutionResult executionResult : results) { + for (Object result : results) { String fieldName = fieldNames.get(ix++); - resolvedValuesByField.put(fieldName, executionResult.getData()); + resolvedValuesByField.put(fieldName, result); } overallResult.complete(new ExecutionResultImpl(resolvedValuesByField, executionContext.getErrors())); }; diff --git a/src/main/java/graphql/execution/Async.java b/src/main/java/graphql/execution/Async.java index 56f7a2f9be..aa05f6ee4d 100644 --- a/src/main/java/graphql/execution/Async.java +++ b/src/main/java/graphql/execution/Async.java @@ -6,6 +6,7 @@ import org.jetbrains.annotations.Nullable; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; @@ -17,19 +18,53 @@ import java.util.function.Function; import java.util.function.Supplier; +import static graphql.Assert.assertTrue; + @Internal @SuppressWarnings("FutureReturnValueIgnored") public class Async { + /** + * A builder of materialized objects or {@link CompletableFuture}s than can present a promise to the list of them + *

+ * This builder has a strict contract on size whereby if the expectedSize is five, then there MUST be five elements presented to it. + * + * @param for two + */ public interface CombinedBuilder { + /** + * This adds a {@link CompletableFuture} into the collection of results + * + * @param completableFuture the CF to add + */ void add(CompletableFuture completableFuture); + /** + * This adds a new value which can be either a materialized value or a {@link CompletableFuture} + * + * @param object the object to add + */ + void addObject(Object object); + + /** + * This will return a {@code CompletableFuture>} even if the inputs are all materialized values + * + * @return a CompletableFuture to a List of values + */ CompletableFuture> await(); + + /** + * This will return a {@code CompletableFuture>} if ANY of the input values are async + * otherwise it just return a materialised {@code List} + * + * @return either a CompletableFuture or a materialized list + */ + /* CompletableFuture> | List */ Object awaitPolymorphic(); } /** - * Combines 0 or more CF into one. It is a wrapper around CompletableFuture.allOf. + * Combines zero or more CFs into one. It is a wrapper around CompletableFuture.allOf. 
* * @param expectedSize how many we expect * @param for two @@ -55,13 +90,22 @@ public void add(CompletableFuture completableFuture) { this.ix++; } + @Override + public void addObject(Object object) { + this.ix++; + } @Override public CompletableFuture> await() { - Assert.assertTrue(ix == 0, () -> "expected size was " + 0 + " got " + ix); + assertTrue(ix == 0, "expected size was 0 got %d", ix); return typedEmpty(); } + @Override + public Object awaitPolymorphic() { + Assert.assertTrue(ix == 0, () -> "expected size was " + 0 + " got " + ix); + return Collections.emptyList(); + } // implementation details: infer the type of Completable> from a singleton empty private static final CompletableFuture> EMPTY = CompletableFuture.completedFuture(Collections.emptyList()); @@ -75,104 +119,232 @@ private static CompletableFuture typedEmpty() { private static class Single implements CombinedBuilder { // avoiding array allocation as there is only 1 CF - private CompletableFuture completableFuture; + private Object value; private int ix; @Override public void add(CompletableFuture completableFuture) { - this.completableFuture = completableFuture; + this.value = completableFuture; + this.ix++; + } + + @Override + public void addObject(Object object) { + this.value = object; this.ix++; } @Override public CompletableFuture> await() { + commonSizeAssert(); + if (value instanceof CompletableFuture) { + @SuppressWarnings("unchecked") + CompletableFuture cf = (CompletableFuture) value; + return cf.thenApply(Collections::singletonList); + } + //noinspection unchecked + return CompletableFuture.completedFuture(Collections.singletonList((T) value)); + } + + @Override + public Object awaitPolymorphic() { + commonSizeAssert(); + if (value instanceof CompletableFuture) { + @SuppressWarnings("unchecked") + CompletableFuture cf = (CompletableFuture) value; + return cf.thenApply(Collections::singletonList); + } + //noinspection unchecked + return Collections.singletonList((T) value); + } + + 
private void commonSizeAssert() { Assert.assertTrue(ix == 1, () -> "expected size was " + 1 + " got " + ix); - return completableFuture.thenApply(Collections::singletonList); } } private static class Many implements CombinedBuilder { - private final CompletableFuture[] array; + private final Object[] array; private int ix; + private int cfCount; - @SuppressWarnings("unchecked") private Many(int size) { - this.array = new CompletableFuture[size]; + this.array = new Object[size]; this.ix = 0; + cfCount = 0; } @Override public void add(CompletableFuture completableFuture) { array[ix++] = completableFuture; + cfCount++; } + @Override + public void addObject(Object object) { + array[ix++] = object; + if (object instanceof CompletableFuture) { + cfCount++; + } + } + + @SuppressWarnings("unchecked") @Override public CompletableFuture> await() { - Assert.assertTrue(ix == array.length, () -> "expected size was " + array.length + " got " + ix); + commonSizeAssert(); CompletableFuture> overallResult = new CompletableFuture<>(); - CompletableFuture.allOf(array) - .whenComplete((ignored, exception) -> { - if (exception != null) { - overallResult.completeExceptionally(exception); - return; - } - List results = new ArrayList<>(array.length); - for (CompletableFuture future : array) { - results.add(future.join()); - } - overallResult.complete(results); - }); + if (cfCount == 0) { + overallResult.complete(materialisedList(array)); + } else { + CompletableFuture[] cfsArr = copyOnlyCFsToArray(); + CompletableFuture.allOf(cfsArr) + .whenComplete((ignored, exception) -> { + if (exception != null) { + overallResult.completeExceptionally(exception); + return; + } + List results = new ArrayList<>(array.length); + if (cfsArr.length == array.length) { + // they are all CFs + for (CompletableFuture cf : cfsArr) { + results.add(cf.join()); + } + } else { + // it's a mixed bag of CFs and materialized objects + for (Object object : array) { + if (object instanceof CompletableFuture) { + 
CompletableFuture cf = (CompletableFuture) object; + // join is safe since they are all completed earlier via CompletableFuture.allOf() + results.add(cf.join()); + } else { + results.add((T) object); + } + } + } + overallResult.complete(results); + }); + } return overallResult; } + @SuppressWarnings("unchecked") + @NotNull + private CompletableFuture[] copyOnlyCFsToArray() { + if (cfCount == array.length) { + // if it's all CFs - make a type safe copy via C code + return Arrays.copyOf(array, array.length, CompletableFuture[].class); + } else { + int i = 0; + CompletableFuture[] dest = new CompletableFuture[cfCount]; + for (Object o : array) { + if (o instanceof CompletableFuture) { + dest[i] = (CompletableFuture) o; + i++; + } + } + return dest; + } + } + + @Override + public Object awaitPolymorphic() { + if (cfCount == 0) { + commonSizeAssert(); + return materialisedList(array); + } else { + return await(); + } + } + + @NotNull + private List materialisedList(Object[] array) { + List results = new ArrayList<>(array.length); + for (Object object : array) { + //noinspection unchecked + results.add((T) object); + } + return results; + } + + private void commonSizeAssert() { + Assert.assertTrue(ix == array.length, () -> "expected size was " + array.length + " got " + ix); + } + + } + + @SuppressWarnings("unchecked") + public static CompletableFuture> each(Collection list, Function cfOrMaterialisedValueFactory) { + Object l = eachPolymorphic(list, cfOrMaterialisedValueFactory); + if (l instanceof CompletableFuture) { + return (CompletableFuture>) l; + } else { + return CompletableFuture.completedFuture((List) l); + } } - public static CompletableFuture> each(Collection list, Function> cfFactory) { - CombinedBuilder futures = ofExpectedSize(list.size()); + /** + * This will run the value factory for each of the values in the provided list. + *

+ * If any of the values provided is a {@link CompletableFuture} it will return a {@link CompletableFuture} result object + * that joins on all values otherwise if none of the values are a {@link CompletableFuture} then it will return a materialized list. + * + * @param list the list to work over + * @param cfOrMaterialisedValueFactory the value factory to call for each iterm in the list + * @param for two + * + * @return a {@link CompletableFuture} to the list of resolved values or the list of values in a materialized fashion + */ + public static /* CompletableFuture> | List */ Object eachPolymorphic(Collection list, Function cfOrMaterialisedValueFactory) { + CombinedBuilder futures = ofExpectedSize(list.size()); for (T t : list) { - CompletableFuture cf; try { - cf = cfFactory.apply(t); - Assert.assertNotNull(cf, () -> "cfFactory must return a non null value"); + Object value = cfOrMaterialisedValueFactory.apply(t); + futures.addObject(value); } catch (Exception e) { - cf = new CompletableFuture<>(); + CompletableFuture cf = new CompletableFuture<>(); // Async.each makes sure that it is not a CompletionException inside a CompletionException cf.completeExceptionally(new CompletionException(e)); + futures.add(cf); } - futures.add(cf); } - return futures.await(); + return futures.awaitPolymorphic(); } - public static CompletableFuture> eachSequentially(Iterable list, BiFunction, CompletableFuture> cfFactory) { + public static CompletableFuture> eachSequentially(Iterable list, BiFunction, Object> cfOrMaterialisedValueFactory) { CompletableFuture> result = new CompletableFuture<>(); - eachSequentiallyImpl(list.iterator(), cfFactory, new ArrayList<>(), result); + eachSequentiallyPolymorphicImpl(list.iterator(), cfOrMaterialisedValueFactory, new ArrayList<>(), result); return result; } - private static void eachSequentiallyImpl(Iterator iterator, BiFunction, CompletableFuture> cfFactory, List tmpResult, CompletableFuture> overallResult) { + 
@SuppressWarnings("unchecked") + private static void eachSequentiallyPolymorphicImpl(Iterator iterator, BiFunction, Object> cfOrMaterialisedValueFactory, List tmpResult, CompletableFuture> overallResult) { if (!iterator.hasNext()) { overallResult.complete(tmpResult); return; } - CompletableFuture cf; + Object value; try { - cf = cfFactory.apply(iterator.next(), tmpResult); - Assert.assertNotNull(cf, () -> "cfFactory must return a non null value"); + value = cfOrMaterialisedValueFactory.apply(iterator.next(), tmpResult); } catch (Exception e) { - cf = new CompletableFuture<>(); - cf.completeExceptionally(new CompletionException(e)); + overallResult.completeExceptionally(new CompletionException(e)); + return; + } + if (value instanceof CompletableFuture) { + CompletableFuture cf = (CompletableFuture) value; + cf.whenComplete((cfResult, exception) -> { + if (exception != null) { + overallResult.completeExceptionally(exception); + return; + } + tmpResult.add(cfResult); + eachSequentiallyPolymorphicImpl(iterator, cfOrMaterialisedValueFactory, tmpResult, overallResult); + }); + } else { + tmpResult.add((U) value); + eachSequentiallyPolymorphicImpl(iterator, cfOrMaterialisedValueFactory, tmpResult, overallResult); } - cf.whenComplete((cfResult, exception) -> { - if (exception != null) { - overallResult.completeExceptionally(exception); - return; - } - tmpResult.add(cfResult); - eachSequentiallyImpl(iterator, cfFactory, tmpResult, overallResult); - }); } @@ -184,12 +356,28 @@ private static void eachSequentiallyImpl(Iterator iterator, BiFunction * * @return a CompletableFuture */ - public static CompletableFuture toCompletableFuture(T t) { + @SuppressWarnings("unchecked") + public static CompletableFuture toCompletableFuture(Object t) { if (t instanceof CompletionStage) { - //noinspection unchecked return ((CompletionStage) t).toCompletableFuture(); } else { - return CompletableFuture.completedFuture(t); + return CompletableFuture.completedFuture((T) t); + } + } + + /** + 
* Turns a CompletionStage into a CompletableFuture if it's not already, otherwise leaves it alone + * as a materialized object. + * + * @param object - the object to check + * + * @return a CompletableFuture from a CompletionStage or the materialized object itself + */ + public static Object toCompletableFutureOrMaterializedObject(Object object) { + if (object instanceof CompletionStage) { + return ((CompletionStage) object).toCompletableFuture(); + } else { + return object; } } @@ -210,7 +398,7 @@ public static CompletableFuture exceptionallyCompletedFuture(Throwable ex } /** - * If the passed in CompletableFuture is null then it creates a CompletableFuture that resolves to null + * If the passed in CompletableFuture is null, then it creates a CompletableFuture that resolves to null * * @param completableFuture the CF to use * @param for two @@ -220,4 +408,4 @@ public static CompletableFuture exceptionallyCompletedFuture(Throwable ex public static @NotNull CompletableFuture orNullCompletedFuture(@Nullable CompletableFuture completableFuture) { return completableFuture != null ? 
completableFuture : CompletableFuture.completedFuture(null); } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/execution/AsyncExecutionStrategy.java b/src/main/java/graphql/execution/AsyncExecutionStrategy.java index 6608fba0f3..27e24ace88 100644 --- a/src/main/java/graphql/execution/AsyncExecutionStrategy.java +++ b/src/main/java/graphql/execution/AsyncExecutionStrategy.java @@ -2,15 +2,17 @@ import graphql.ExecutionResult; import graphql.PublicApi; +import graphql.execution.incremental.DeferredExecutionSupport; import graphql.execution.instrumentation.ExecutionStrategyInstrumentationContext; import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.parameters.InstrumentationExecutionStrategyParameters; +import graphql.introspection.Introspection; import java.util.List; +import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.function.BiConsumer; - /** * The standard graphql execution strategy that runs fields asynchronously non-blocking. 
*/ @@ -36,7 +38,8 @@ public AsyncExecutionStrategy(DataFetcherExceptionHandler exceptionHandler) { @Override @SuppressWarnings("FutureReturnValueIgnored") public CompletableFuture execute(ExecutionContext executionContext, ExecutionStrategyParameters parameters) throws NonNullableFieldWasNullException { - + DataLoaderDispatchStrategy dataLoaderDispatcherStrategy = executionContext.getDataLoaderDispatcherStrategy(); + dataLoaderDispatcherStrategy.executionStrategy(executionContext, parameters); Instrumentation instrumentation = executionContext.getInstrumentation(); InstrumentationExecutionStrategyParameters instrumentationParameters = new InstrumentationExecutionStrategyParameters(executionContext, parameters); @@ -44,37 +47,39 @@ public CompletableFuture execute(ExecutionContext executionCont MergedSelectionSet fields = parameters.getFields(); List fieldNames = fields.getKeys(); - Async.CombinedBuilder futures = Async.ofExpectedSize(fields.size()); - for (String fieldName : fieldNames) { - MergedField currentField = fields.getSubField(fieldName); - - ResultPath fieldPath = parameters.getPath().segment(mkNameForPath(currentField)); - ExecutionStrategyParameters newParameters = parameters - .transform(builder -> builder.field(currentField).path(fieldPath).parent(parameters)); - CompletableFuture future = resolveFieldWithInfo(executionContext, newParameters); - futures.add(future); + Optional isNotSensible = Introspection.isIntrospectionSensible(fields, executionContext); + if (isNotSensible.isPresent()) { + return CompletableFuture.completedFuture(isNotSensible.get()); } + + DeferredExecutionSupport deferredExecutionSupport = createDeferredExecutionSupport(executionContext, parameters); + Async.CombinedBuilder futures = getAsyncFieldValueInfo(executionContext, parameters, deferredExecutionSupport); + CompletableFuture overallResult = new CompletableFuture<>(); - executionStrategyCtx.onDispatched(overallResult); + executionStrategyCtx.onDispatched(); 
futures.await().whenComplete((completeValueInfos, throwable) -> { - BiConsumer, Throwable> handleResultsConsumer = handleResults(executionContext, fieldNames, overallResult); + List fieldsExecutedOnInitialResult = deferredExecutionSupport.getNonDeferredFieldNames(fieldNames); + + BiConsumer, Throwable> handleResultsConsumer = handleResults(executionContext, fieldsExecutedOnInitialResult, overallResult); if (throwable != null) { handleResultsConsumer.accept(null, throwable.getCause()); return; } - Async.CombinedBuilder executionResultFutures = Async.ofExpectedSize(completeValueInfos.size()); + Async.CombinedBuilder fieldValuesFutures = Async.ofExpectedSize(completeValueInfos.size()); for (FieldValueInfo completeValueInfo : completeValueInfos) { - executionResultFutures.add(completeValueInfo.getFieldValue()); + fieldValuesFutures.addObject(completeValueInfo.getFieldValueObject()); } + dataLoaderDispatcherStrategy.executionStrategyOnFieldValuesInfo(completeValueInfos, parameters); executionStrategyCtx.onFieldValuesInfo(completeValueInfos); - executionResultFutures.await().whenComplete(handleResultsConsumer); + fieldValuesFutures.await().whenComplete(handleResultsConsumer); }).exceptionally((ex) -> { // if there are any issues with combining/handling the field results, // complete the future at all costs and bubble up any thrown exception so // the execution does not hang. 
+ dataLoaderDispatcherStrategy.executionStrategyOnFieldValuesException(ex, parameters); executionStrategyCtx.onFieldValuesException(); overallResult.completeExceptionally(ex); return null; diff --git a/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java b/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java index fc2dde0980..6f64b8cd8c 100644 --- a/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java +++ b/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java @@ -6,8 +6,10 @@ import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.InstrumentationContext; import graphql.execution.instrumentation.parameters.InstrumentationExecutionStrategyParameters; +import graphql.introspection.Introspection; import java.util.List; +import java.util.Optional; import java.util.concurrent.CompletableFuture; import static graphql.execution.instrumentation.SimpleInstrumentationContext.nonNullCtx; @@ -30,6 +32,7 @@ public AsyncSerialExecutionStrategy(DataFetcherExceptionHandler exceptionHandler @Override @SuppressWarnings({"TypeParameterUnusedInFormals", "FutureReturnValueIgnored"}) public CompletableFuture execute(ExecutionContext executionContext, ExecutionStrategyParameters parameters) throws NonNullableFieldWasNullException { + executionContext.getDataLoaderDispatcherStrategy().executionStrategy(executionContext, parameters); Instrumentation instrumentation = executionContext.getInstrumentation(); InstrumentationExecutionStrategyParameters instrumentationParameters = new InstrumentationExecutionStrategyParameters(executionContext, parameters); @@ -39,7 +42,14 @@ public CompletableFuture execute(ExecutionContext executionCont MergedSelectionSet fields = parameters.getFields(); ImmutableList fieldNames = ImmutableList.copyOf(fields.keySet()); - CompletableFuture> resultsFuture = Async.eachSequentially(fieldNames, (fieldName, prevResults) -> { + // this is highly unlikely since Mutations cant 
do introspection BUT in theory someone could make the query strategy this code + // so belts and braces + Optional isNotSensible = Introspection.isIntrospectionSensible(fields, executionContext); + if (isNotSensible.isPresent()) { + return CompletableFuture.completedFuture(isNotSensible.get()); + } + + CompletableFuture> resultsFuture = Async.eachSequentially(fieldNames, (fieldName, prevResults) -> { MergedField currentField = fields.getSubField(fieldName); ResultPath fieldPath = parameters.getPath().segment(mkNameForPath(currentField)); ExecutionStrategyParameters newParameters = parameters @@ -48,7 +58,7 @@ public CompletableFuture execute(ExecutionContext executionCont }); CompletableFuture overallResult = new CompletableFuture<>(); - executionStrategyCtx.onDispatched(overallResult); + executionStrategyCtx.onDispatched(); resultsFuture.whenComplete(handleResults(executionContext, fieldNames, overallResult)); overallResult.whenComplete(executionStrategyCtx::onCompleted); diff --git a/src/main/java/graphql/execution/DataFetcherExceptionHandler.java b/src/main/java/graphql/execution/DataFetcherExceptionHandler.java index 7b7d294a0b..6daafd94dd 100644 --- a/src/main/java/graphql/execution/DataFetcherExceptionHandler.java +++ b/src/main/java/graphql/execution/DataFetcherExceptionHandler.java @@ -1,6 +1,5 @@ package graphql.execution; -import graphql.DeprecatedAt; import graphql.ExecutionResult; import graphql.PublicSpi; import graphql.schema.DataFetcher; diff --git a/src/main/java/graphql/execution/DataFetcherResult.java b/src/main/java/graphql/execution/DataFetcherResult.java index 460b07daf9..3baa4f4fec 100644 --- a/src/main/java/graphql/execution/DataFetcherResult.java +++ b/src/main/java/graphql/execution/DataFetcherResult.java @@ -1,7 +1,6 @@ package graphql.execution; import com.google.common.collect.ImmutableList; -import graphql.DeprecatedAt; import graphql.ExecutionResult; import graphql.GraphQLError; import graphql.Internal; @@ -49,8 +48,7 @@ public class 
DataFetcherResult { * @deprecated use the {@link #newResult()} builder instead */ @Internal - @Deprecated - @DeprecatedAt("2019-01-11") + @Deprecated(since = "2019-01-11") public DataFetcherResult(T data, List errors) { this(data, errors, null, null); } diff --git a/src/main/java/graphql/execution/DataLoaderDispatchStrategy.java b/src/main/java/graphql/execution/DataLoaderDispatchStrategy.java new file mode 100644 index 0000000000..ee3c0bd97c --- /dev/null +++ b/src/main/java/graphql/execution/DataLoaderDispatchStrategy.java @@ -0,0 +1,56 @@ +package graphql.execution; + +import graphql.Internal; +import graphql.schema.DataFetcher; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +@Internal +public interface DataLoaderDispatchStrategy { + + DataLoaderDispatchStrategy NO_OP = new DataLoaderDispatchStrategy() { + }; + + + default void executionStrategy(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + + } + + default void executionStrategyOnFieldValuesInfo(List fieldValueInfoList, ExecutionStrategyParameters parameters) { + + } + + default void executionStrategyOnFieldValuesException(Throwable t, ExecutionStrategyParameters parameters) { + + } + + + default void executeObject(ExecutionContext executionContext, ExecutionStrategyParameters executionStrategyParameters) { + + } + + default void executeObjectOnFieldValuesInfo(List fieldValueInfoList, ExecutionStrategyParameters parameters) { + + } + + default void executeObjectOnFieldValuesException(Throwable t, ExecutionStrategyParameters parameters) { + + } + + default void fieldFetched(ExecutionContext executionContext, + ExecutionStrategyParameters executionStrategyParameters, + DataFetcher dataFetcher, + Object fetchedValue) { + + } + + + default DataFetcher modifyDataFetcher(DataFetcher dataFetcher) { + return dataFetcher; + } + + default void deferredField(ExecutionContext executionContext, MergedField currentField) { + + } +} diff --git 
a/src/main/java/graphql/execution/Execution.java b/src/main/java/graphql/execution/Execution.java index 916bc64659..584449553a 100644 --- a/src/main/java/graphql/execution/Execution.java +++ b/src/main/java/graphql/execution/Execution.java @@ -4,15 +4,21 @@ import graphql.ExecutionInput; import graphql.ExecutionResult; import graphql.ExecutionResultImpl; +import graphql.ExperimentalApi; import graphql.GraphQLContext; import graphql.GraphQLError; import graphql.Internal; +import graphql.execution.incremental.IncrementalCallState; import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.InstrumentationContext; import graphql.execution.instrumentation.InstrumentationState; +import graphql.execution.instrumentation.dataloader.FallbackDataLoaderDispatchStrategy; +import graphql.execution.instrumentation.dataloader.PerLevelDataLoaderDispatchStrategy; import graphql.execution.instrumentation.parameters.InstrumentationExecuteOperationParameters; import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; import graphql.extensions.ExtensionsBuilder; +import graphql.incremental.DelayedIncrementalPartialResult; +import graphql.incremental.IncrementalExecutionResultImpl; import graphql.language.Document; import graphql.language.FragmentDefinition; import graphql.language.NodeUtil; @@ -21,37 +27,43 @@ import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLSchema; import graphql.schema.impl.SchemaUtil; -import graphql.util.LogKit; -import org.slf4j.Logger; +import org.reactivestreams.Publisher; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.CompletableFuture; import static graphql.execution.ExecutionContextBuilder.newExecutionContextBuilder; import static graphql.execution.ExecutionStepInfo.newExecutionStepInfo; import static graphql.execution.ExecutionStrategyParameters.newParameters; import static 
graphql.execution.instrumentation.SimpleInstrumentationContext.nonNullCtx; +import static graphql.execution.instrumentation.dataloader.EmptyDataLoaderRegistryInstance.EMPTY_DATALOADER_REGISTRY; import static java.util.concurrent.CompletableFuture.completedFuture; @Internal public class Execution { - private static final Logger logNotSafe = LogKit.getNotPrivacySafeLogger(Execution.class); - private final FieldCollector fieldCollector = new FieldCollector(); private final ExecutionStrategy queryStrategy; private final ExecutionStrategy mutationStrategy; private final ExecutionStrategy subscriptionStrategy; private final Instrumentation instrumentation; private final ValueUnboxer valueUnboxer; + private final boolean doNotAutomaticallyDispatchDataLoader; - public Execution(ExecutionStrategy queryStrategy, ExecutionStrategy mutationStrategy, ExecutionStrategy subscriptionStrategy, Instrumentation instrumentation, ValueUnboxer valueUnboxer) { + public Execution(ExecutionStrategy queryStrategy, + ExecutionStrategy mutationStrategy, + ExecutionStrategy subscriptionStrategy, + Instrumentation instrumentation, + ValueUnboxer valueUnboxer, + boolean doNotAutomaticallyDispatchDataLoader) { this.queryStrategy = queryStrategy != null ? queryStrategy : new AsyncExecutionStrategy(); this.mutationStrategy = mutationStrategy != null ? mutationStrategy : new AsyncSerialExecutionStrategy(); this.subscriptionStrategy = subscriptionStrategy != null ? 
subscriptionStrategy : new AsyncExecutionStrategy(); this.instrumentation = instrumentation; this.valueUnboxer = valueUnboxer; + this.doNotAutomaticallyDispatchDataLoader = doNotAutomaticallyDispatchDataLoader; } public CompletableFuture execute(Document document, GraphQLSchema graphQLSchema, ExecutionId executionId, ExecutionInput executionInput, InstrumentationState instrumentationState) { @@ -95,9 +107,10 @@ public CompletableFuture execute(Document document, GraphQLSche .executionInput(executionInput) .build(); + executionContext.getGraphQLContext().put(ResultNodesInfo.RESULT_NODES_INFO, executionContext.getResultNodesInfo()); InstrumentationExecutionParameters parameters = new InstrumentationExecutionParameters( - executionInput, graphQLSchema, instrumentationState + executionInput, graphQLSchema ); executionContext = instrumentation.instrumentExecutionContext(executionContext, parameters, instrumentationState); return executeOperation(executionContext, executionInput.getRoot(), executionContext.getOperationDefinition()); @@ -122,7 +135,7 @@ private CompletableFuture executeOperation(ExecutionContext exe ExecutionResult executionResult = new ExecutionResultImpl(Collections.singletonList((GraphQLError) rte)); CompletableFuture resultCompletableFuture = completedFuture(executionResult); - executeOperationCtx.onDispatched(resultCompletableFuture); + executeOperationCtx.onDispatched(); executeOperationCtx.onCompleted(executionResult, rte); return resultCompletableFuture; } @@ -137,7 +150,13 @@ private CompletableFuture executeOperation(ExecutionContext exe .graphQLContext(graphQLContext) .build(); - MergedSelectionSet fields = fieldCollector.collectFields(collectorParameters, operationDefinition.getSelectionSet()); + MergedSelectionSet fields = fieldCollector.collectFields( + collectorParameters, + operationDefinition.getSelectionSet(), + Optional.ofNullable(executionContext.getGraphQLContext()) + .map(graphqlContext -> (Boolean) 
graphqlContext.get(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT)) + .orElse(false) + ); ResultPath path = ResultPath.rootPath(); ExecutionStepInfo executionStepInfo = newExecutionStepInfo().type(operationRootType).path(path).build(); @@ -152,12 +171,12 @@ private CompletableFuture executeOperation(ExecutionContext exe .path(path) .build(); + CompletableFuture result; try { ExecutionStrategy executionStrategy = executionContext.getStrategy(operation); - if (logNotSafe.isDebugEnabled()) { - logNotSafe.debug("Executing '{}' query operation: '{}' using '{}' execution strategy", executionContext.getExecutionId(), operation, executionStrategy.getClass().getName()); - } + DataLoaderDispatchStrategy dataLoaderDispatchStrategy = createDataLoaderDispatchStrategy(executionContext, executionStrategy); + executionContext.setDataLoaderDispatcherStrategy(dataLoaderDispatchStrategy); result = executionStrategy.execute(executionContext, parameters); } catch (NonNullableFieldWasNullException e) { // this means it was non-null types all the way from an offending non-null type @@ -171,15 +190,51 @@ private CompletableFuture executeOperation(ExecutionContext exe } // note this happens NOW - not when the result completes - executeOperationCtx.onDispatched(result); + executeOperationCtx.onDispatched(); // fill out extensions if we have them result = result.thenApply(er -> mergeExtensionsBuilderIfPresent(er, graphQLContext)); result = result.whenComplete(executeOperationCtx::onCompleted); - return result; + + return incrementalSupport(executionContext, result); + } + + /* + * Adds the deferred publisher if it's needed at the end of the query. 
This is also a good time for the deferred code to start running + */ + private CompletableFuture incrementalSupport(ExecutionContext executionContext, CompletableFuture result) { + return result.thenApply(er -> { + IncrementalCallState incrementalCallState = executionContext.getIncrementalCallState(); + if (incrementalCallState.getIncrementalCallsDetected()) { + // we start the rest of the query now to maximize throughput. We have the initial important results, + // and now we can start the rest of the calls as early as possible (even before someone subscribes) + Publisher publisher = incrementalCallState.startDeferredCalls(); + + return IncrementalExecutionResultImpl.fromExecutionResult(er) + // "hasNext" can, in theory, be "false" when all the incremental items are delivered in the + // first response payload. However, the current implementation will never result in this. + // The behaviour might change if we decide to make optimizations in the future. + .hasNext(true) + .incrementalItemPublisher(publisher) + .build(); + } + return er; + }); } + private DataLoaderDispatchStrategy createDataLoaderDispatchStrategy(ExecutionContext executionContext, ExecutionStrategy executionStrategy) { + if (executionContext.getDataLoaderRegistry() == EMPTY_DATALOADER_REGISTRY || doNotAutomaticallyDispatchDataLoader) { + return DataLoaderDispatchStrategy.NO_OP; + } + if (executionStrategy instanceof AsyncExecutionStrategy) { + return new PerLevelDataLoaderDispatchStrategy(executionContext); + } else { + return new FallbackDataLoaderDispatchStrategy(executionContext); + } + } + + private void addExtensionsBuilderNotPresent(GraphQLContext graphQLContext) { Object builder = graphQLContext.get(ExtensionsBuilder.class); if (builder == null) { diff --git a/src/main/java/graphql/execution/ExecutionContext.java b/src/main/java/graphql/execution/ExecutionContext.java index 50db89b566..e459cbe0ad 100644 --- a/src/main/java/graphql/execution/ExecutionContext.java +++ 
b/src/main/java/graphql/execution/ExecutionContext.java @@ -3,12 +3,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import graphql.DeprecatedAt; import graphql.ExecutionInput; import graphql.GraphQLContext; import graphql.GraphQLError; +import graphql.Internal; import graphql.PublicApi; import graphql.collect.ImmutableKit; +import graphql.execution.incremental.IncrementalCallState; import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.InstrumentationState; import graphql.language.Document; @@ -54,10 +55,16 @@ public class ExecutionContext { private final Set errorPaths = new HashSet<>(); private final DataLoaderRegistry dataLoaderRegistry; private final Locale locale; + private final IncrementalCallState incrementalCallState = new IncrementalCallState(); private final ValueUnboxer valueUnboxer; private final ExecutionInput executionInput; private final Supplier queryTree; + // this is modified after creation so it needs to be volatile to ensure visibility across Threads + private volatile DataLoaderDispatchStrategy dataLoaderDispatcherStrategy = DataLoaderDispatchStrategy.NO_OP; + + private final ResultNodesInfo resultNodesInfo = new ResultNodesInfo(); + ExecutionContext(ExecutionContextBuilder builder) { this.graphQLSchema = builder.graphQLSchema; this.executionId = builder.executionId; @@ -79,7 +86,7 @@ public class ExecutionContext { this.errors.set(builder.errors); this.localContext = builder.localContext; this.executionInput = builder.executionInput; - queryTree = FpKit.interThreadMemoize(() -> ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragmentsByName, coercedVariables)); + this.queryTree = FpKit.interThreadMemoize(() -> ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragmentsByName, coercedVariables)); } @@ -115,17 +122,6 @@ public 
OperationDefinition getOperationDefinition() { return operationDefinition; } - /** - * @return map of coerced variables - * - * @deprecated use {@link #getCoercedVariables()} instead - */ - @Deprecated - @DeprecatedAt("2022-05-24") - public Map getVariables() { - return coercedVariables.toMap(); - } - public CoercedVariables getCoercedVariables() { return coercedVariables; } @@ -137,8 +133,7 @@ public CoercedVariables getCoercedVariables() { * * @deprecated use {@link #getGraphQLContext()} instead */ - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) public T getContext() { return (T) context; @@ -248,7 +243,9 @@ public List getErrors() { return errors.get(); } - public ExecutionStrategy getQueryStrategy() { return queryStrategy; } + public ExecutionStrategy getQueryStrategy() { + return queryStrategy; + } public ExecutionStrategy getMutationStrategy() { return mutationStrategy; @@ -258,6 +255,10 @@ public ExecutionStrategy getSubscriptionStrategy() { return subscriptionStrategy; } + public IncrementalCallState getIncrementalCallState() { + return incrementalCallState; + } + public ExecutionStrategy getStrategy(OperationDefinition.Operation operation) { if (operation == OperationDefinition.Operation.MUTATION) { return getMutationStrategy(); @@ -272,6 +273,16 @@ public Supplier getNormalizedQueryTree() { return queryTree; } + @Internal + public void setDataLoaderDispatcherStrategy(DataLoaderDispatchStrategy dataLoaderDispatcherStrategy) { + this.dataLoaderDispatcherStrategy = dataLoaderDispatcherStrategy; + } + + @Internal + public DataLoaderDispatchStrategy getDataLoaderDispatcherStrategy() { + return dataLoaderDispatcherStrategy; + } + /** * This helps you transform the current ExecutionContext object into another one by starting a builder with all * the current values and allows you to transform it how you want. 
@@ -285,4 +296,8 @@ public ExecutionContext transform(Consumer builderConsu builderConsumer.accept(builder); return builder.build(); } + + public ResultNodesInfo getResultNodesInfo() { + return resultNodesInfo; + } } diff --git a/src/main/java/graphql/execution/ExecutionContextBuilder.java b/src/main/java/graphql/execution/ExecutionContextBuilder.java index f941be07b7..7220e8dee7 100644 --- a/src/main/java/graphql/execution/ExecutionContextBuilder.java +++ b/src/main/java/graphql/execution/ExecutionContextBuilder.java @@ -2,7 +2,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import graphql.DeprecatedAt; import graphql.ExecutionInput; import graphql.GraphQLContext; import graphql.GraphQLError; @@ -131,8 +130,7 @@ public ExecutionContextBuilder subscriptionStrategy(ExecutionStrategy subscripti /* * @deprecated use {@link #graphQLContext(GraphQLContext)} instead */ - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") public ExecutionContextBuilder context(Object context) { this.context = context; return this; @@ -159,8 +157,7 @@ public ExecutionContextBuilder root(Object root) { * * @deprecated use {@link #coercedVariables(CoercedVariables)} instead */ - @Deprecated - @DeprecatedAt("2022-05-24") + @Deprecated(since = "2022-05-24") public ExecutionContextBuilder variables(Map variables) { this.coercedVariables = CoercedVariables.of(variables); return this; diff --git a/src/main/java/graphql/execution/ExecutionId.java b/src/main/java/graphql/execution/ExecutionId.java index 2c10a6a719..933a5369c8 100644 --- a/src/main/java/graphql/execution/ExecutionId.java +++ b/src/main/java/graphql/execution/ExecutionId.java @@ -2,8 +2,7 @@ import graphql.Assert; import graphql.PublicApi; - -import java.util.UUID; +import graphql.util.IdGenerator; /** * This opaque identifier is used to identify a unique query execution @@ -17,7 +16,7 @@ public class ExecutionId { * @return a query execution identifier 
*/ public static ExecutionId generate() { - return new ExecutionId(UUID.randomUUID().toString()); + return new ExecutionId(IdGenerator.uuid().toString()); } /** diff --git a/src/main/java/graphql/execution/ExecutionStepInfo.java b/src/main/java/graphql/execution/ExecutionStepInfo.java index 884ca79179..27313bb965 100644 --- a/src/main/java/graphql/execution/ExecutionStepInfo.java +++ b/src/main/java/graphql/execution/ExecutionStepInfo.java @@ -1,6 +1,5 @@ package graphql.execution; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.collect.ImmutableMapWithNullValues; import graphql.schema.GraphQLFieldDefinition; @@ -84,8 +83,7 @@ private ExecutionStepInfo(Builder builder) { * @see ExecutionStepInfo#getObjectType() * @deprecated use {@link #getObjectType()} instead as it is named better */ - @Deprecated - @DeprecatedAt("2022-02-03") + @Deprecated(since = "2022-02-03") public GraphQLObjectType getFieldContainer() { return fieldContainer; } @@ -301,11 +299,8 @@ public Builder path(ResultPath resultPath) { return this; } - public Builder arguments(Supplier> arguments) { - this.arguments = () -> { - Map map = arguments.get(); - return map == null ? 
ImmutableMapWithNullValues.emptyMap() : ImmutableMapWithNullValues.copyOf(map); - }; + public Builder arguments(Supplier> arguments) { + this.arguments = arguments; return this; } diff --git a/src/main/java/graphql/execution/ExecutionStrategy.java b/src/main/java/graphql/execution/ExecutionStrategy.java index 4ed0f1e644..5e749360a5 100644 --- a/src/main/java/graphql/execution/ExecutionStrategy.java +++ b/src/main/java/graphql/execution/ExecutionStrategy.java @@ -1,8 +1,10 @@ package graphql.execution; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; import graphql.ExecutionResult; import graphql.ExecutionResultImpl; +import graphql.ExperimentalApi; import graphql.GraphQLError; import graphql.Internal; import graphql.PublicSpi; @@ -10,11 +12,14 @@ import graphql.TrivialDataFetcher; import graphql.TypeMismatchError; import graphql.UnresolvedTypeError; -import graphql.collect.ImmutableKit; +import graphql.collect.ImmutableMapWithNullValues; import graphql.execution.directives.QueryDirectives; import graphql.execution.directives.QueryDirectivesImpl; +import graphql.execution.incremental.DeferredExecutionSupport; +import graphql.execution.instrumentation.ExecuteObjectInstrumentationContext; import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.InstrumentationContext; +import graphql.execution.instrumentation.parameters.InstrumentationExecutionStrategyParameters; import graphql.execution.instrumentation.parameters.InstrumentationFieldCompleteParameters; import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters; import graphql.execution.instrumentation.parameters.InstrumentationFieldParameters; @@ -40,17 +45,17 @@ import graphql.schema.GraphQLType; import graphql.schema.LightDataFetcher; import graphql.util.FpKit; -import graphql.util.LogKit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.jetbrains.annotations.NotNull; import 
java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.OptionalInt; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; +import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; @@ -62,12 +67,12 @@ import static graphql.execution.FieldValueInfo.CompleteValueType.NULL; import static graphql.execution.FieldValueInfo.CompleteValueType.OBJECT; import static graphql.execution.FieldValueInfo.CompleteValueType.SCALAR; +import static graphql.execution.ResultNodesInfo.MAX_RESULT_NODES; import static graphql.execution.instrumentation.SimpleInstrumentationContext.nonNullCtx; import static graphql.schema.DataFetchingEnvironmentImpl.newDataFetchingEnvironment; import static graphql.schema.GraphQLTypeUtil.isEnum; import static graphql.schema.GraphQLTypeUtil.isList; import static graphql.schema.GraphQLTypeUtil.isScalar; -import static java.util.concurrent.CompletableFuture.completedFuture; /** * An execution strategy is give a list of fields from the graphql query to execute and find values for using a recursive strategy. 
@@ -126,14 +131,11 @@ @SuppressWarnings("FutureReturnValueIgnored") public abstract class ExecutionStrategy { - private static final Logger log = LoggerFactory.getLogger(ExecutionStrategy.class); - private static final Logger logNotSafe = LogKit.getNotPrivacySafeLogger(ExecutionStrategy.class); - protected final FieldCollector fieldCollector = new FieldCollector(); protected final ExecutionStepInfoFactory executionStepInfoFactory = new ExecutionStepInfoFactory(); + protected final DataFetcherExceptionHandler dataFetcherExceptionHandler; private final ResolveType resolvedType = new ResolveType(); - protected final DataFetcherExceptionHandler dataFetcherExceptionHandler; /** * The default execution strategy constructor uses the {@link SimpleDataFetcherExceptionHandler} @@ -143,6 +145,7 @@ protected ExecutionStrategy() { dataFetcherExceptionHandler = new SimpleDataFetcherExceptionHandler(); } + /** * The consumers of the execution strategy can pass in a {@link DataFetcherExceptionHandler} to better * decide what do when a data fetching error happens @@ -153,6 +156,23 @@ protected ExecutionStrategy(DataFetcherExceptionHandler dataFetcherExceptionHand this.dataFetcherExceptionHandler = dataFetcherExceptionHandler; } + + @Internal + public static String mkNameForPath(Field currentField) { + return mkNameForPath(Collections.singletonList(currentField)); + } + + @Internal + public static String mkNameForPath(MergedField mergedField) { + return mkNameForPath(mergedField.getFields()); + } + + @Internal + public static String mkNameForPath(List currentField) { + Field field = currentField.get(0); + return field.getResultKey(); + } + /** * This is the entry point to an execution strategy. It will be passed the fields to execute and get values for. 
* @@ -161,28 +181,188 @@ protected ExecutionStrategy(DataFetcherExceptionHandler dataFetcherExceptionHand * * @return a promise to an {@link ExecutionResult} * - * @throws NonNullableFieldWasNullException in the future if a non null field resolves to a null value + * @throws NonNullableFieldWasNullException in the future if a non-null field resolves to a null value */ public abstract CompletableFuture execute(ExecutionContext executionContext, ExecutionStrategyParameters parameters) throws NonNullableFieldWasNullException; + /** + * This is the re-entry point for an execution strategy when an object type needs to be resolved. + * + * @param executionContext contains the top level execution parameters + * @param parameters contains the parameters holding the fields to be executed and source object + * + * @return a {@link CompletableFuture} promise to a map of object field values or a materialized map of object field values + * + * @throws NonNullableFieldWasNullException in the {@link CompletableFuture} if a non-null field resolved to a null value + */ + @SuppressWarnings("unchecked") + protected Object /* CompletableFuture> | Map */ + executeObject(ExecutionContext executionContext, ExecutionStrategyParameters parameters) throws NonNullableFieldWasNullException { + DataLoaderDispatchStrategy dataLoaderDispatcherStrategy = executionContext.getDataLoaderDispatcherStrategy(); + dataLoaderDispatcherStrategy.executeObject(executionContext, parameters); + Instrumentation instrumentation = executionContext.getInstrumentation(); + InstrumentationExecutionStrategyParameters instrumentationParameters = new InstrumentationExecutionStrategyParameters(executionContext, parameters); + + ExecuteObjectInstrumentationContext resolveObjectCtx = ExecuteObjectInstrumentationContext.nonNullCtx( + instrumentation.beginExecuteObject(instrumentationParameters, executionContext.getInstrumentationState()) + ); + + List fieldNames = parameters.getFields().getKeys(); + + 
DeferredExecutionSupport deferredExecutionSupport = createDeferredExecutionSupport(executionContext, parameters); + Async.CombinedBuilder resolvedFieldFutures = getAsyncFieldValueInfo(executionContext, parameters, deferredExecutionSupport); + + CompletableFuture> overallResult = new CompletableFuture<>(); + List fieldsExecutedOnInitialResult = deferredExecutionSupport.getNonDeferredFieldNames(fieldNames); + BiConsumer, Throwable> handleResultsConsumer = buildFieldValueMap(fieldsExecutedOnInitialResult, overallResult, executionContext); + + resolveObjectCtx.onDispatched(); + + Object fieldValueInfosResult = resolvedFieldFutures.awaitPolymorphic(); + if (fieldValueInfosResult instanceof CompletableFuture) { + CompletableFuture> fieldValueInfos = (CompletableFuture>) fieldValueInfosResult; + fieldValueInfos.whenComplete((completeValueInfos, throwable) -> { + if (throwable != null) { + handleResultsConsumer.accept(null, throwable); + return; + } + + Async.CombinedBuilder resultFutures = fieldValuesCombinedBuilder(completeValueInfos); + dataLoaderDispatcherStrategy.executeObjectOnFieldValuesInfo(completeValueInfos, parameters); + resolveObjectCtx.onFieldValuesInfo(completeValueInfos); + resultFutures.await().whenComplete(handleResultsConsumer); + }).exceptionally((ex) -> { + // if there are any issues with combining/handling the field results, + // complete the future at all costs and bubble up any thrown exception so + // the execution does not hang. 
+ dataLoaderDispatcherStrategy.executeObjectOnFieldValuesException(ex, parameters); + resolveObjectCtx.onFieldValuesException(); + overallResult.completeExceptionally(ex); + return null; + }); + overallResult.whenComplete(resolveObjectCtx::onCompleted); + return overallResult; + } else { + List completeValueInfos = (List) fieldValueInfosResult; + + Async.CombinedBuilder resultFutures = fieldValuesCombinedBuilder(completeValueInfos); + dataLoaderDispatcherStrategy.executeObjectOnFieldValuesInfo(completeValueInfos, parameters); + resolveObjectCtx.onFieldValuesInfo(completeValueInfos); + + Object completedValuesObject = resultFutures.awaitPolymorphic(); + if (completedValuesObject instanceof CompletableFuture) { + CompletableFuture> completedValues = (CompletableFuture>) completedValuesObject; + completedValues.whenComplete(handleResultsConsumer); + overallResult.whenComplete(resolveObjectCtx::onCompleted); + return overallResult; + } else { + Map fieldValueMap = buildFieldValueMap(fieldsExecutedOnInitialResult, (List) completedValuesObject); + resolveObjectCtx.onCompleted(fieldValueMap, null); + return fieldValueMap; + } + } + } + + @NotNull + private static Async.CombinedBuilder fieldValuesCombinedBuilder(List completeValueInfos) { + Async.CombinedBuilder resultFutures = Async.ofExpectedSize(completeValueInfos.size()); + for (FieldValueInfo completeValueInfo : completeValueInfos) { + resultFutures.addObject(completeValueInfo.getFieldValueObject()); + } + return resultFutures; + } + + private BiConsumer, Throwable> buildFieldValueMap(List fieldNames, CompletableFuture> overallResult, ExecutionContext executionContext) { + return (List results, Throwable exception) -> { + if (exception != null) { + handleValueException(overallResult, exception, executionContext); + return; + } + Map resolvedValuesByField = buildFieldValueMap(fieldNames, results); + overallResult.complete(resolvedValuesByField); + }; + } + + @NotNull + private static Map buildFieldValueMap(List 
fieldNames, List results) { + Map resolvedValuesByField = Maps.newLinkedHashMapWithExpectedSize(fieldNames.size()); + int ix = 0; + for (Object fieldValue : results) { + String fieldName = fieldNames.get(ix++); + resolvedValuesByField.put(fieldName, fieldValue); + } + return resolvedValuesByField; + } + + DeferredExecutionSupport createDeferredExecutionSupport(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + MergedSelectionSet fields = parameters.getFields(); + + return Optional.ofNullable(executionContext.getGraphQLContext()) + .map(graphqlContext -> (Boolean) graphqlContext.get(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT)) + .orElse(false) ? + new DeferredExecutionSupport.DeferredExecutionSupportImpl( + fields, + parameters, + executionContext, + (ec, esp) -> Async.toCompletableFuture(resolveFieldWithInfo(ec, esp)) + ) : DeferredExecutionSupport.NOOP; + + } + + @NotNull + Async.CombinedBuilder getAsyncFieldValueInfo( + ExecutionContext executionContext, + ExecutionStrategyParameters parameters, + DeferredExecutionSupport deferredExecutionSupport + ) { + MergedSelectionSet fields = parameters.getFields(); + + executionContext.getIncrementalCallState().enqueue(deferredExecutionSupport.createCalls()); + + // Only non-deferred fields should be considered for calculating the expected size of futures. 
+ Async.CombinedBuilder futures = Async + .ofExpectedSize(fields.size() - deferredExecutionSupport.deferredFieldsCount()); + + for (String fieldName : fields.getKeys()) { + MergedField currentField = fields.getSubField(fieldName); + + ResultPath fieldPath = parameters.getPath().segment(mkNameForPath(currentField)); + ExecutionStrategyParameters newParameters = parameters + .transform(builder -> builder.field(currentField).path(fieldPath).parent(parameters)); + + if (!deferredExecutionSupport.isDeferredField(currentField)) { + Object fieldValueInfo = resolveFieldWithInfo(executionContext, newParameters); + futures.addObject(fieldValueInfo); + } + } + return futures; + } + /** * Called to fetch a value for a field and resolve it further in terms of the graphql query. This will call - * #fetchField followed by #completeField and the completed {@link ExecutionResult} is returned. + * #fetchField followed by #completeField and the completed Object is returned. *

* An execution strategy can iterate the fields to be executed and call this method for each one *

* Graphql fragments mean that for any give logical field can have one or more {@link Field} values associated with it - * in the query, hence the fieldList. However the first entry is representative of the field for most purposes. + * in the query, hence the fieldList. However, the first entry is representative of the field for most purposes. * * @param executionContext contains the top level execution parameters * @param parameters contains the parameters holding the fields to be executed and source object * - * @return a promise to an {@link ExecutionResult} + * @return a {@link CompletableFuture} promise to an {@link Object} or the materialized {@link Object} * - * @throws NonNullableFieldWasNullException in the future if a non null field resolves to a null value + * @throws NonNullableFieldWasNullException in the future if a non-null field resolved to a null value */ - protected CompletableFuture resolveField(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { - return resolveFieldWithInfo(executionContext, parameters).thenCompose(FieldValueInfo::getFieldValue); + @SuppressWarnings("unchecked") + protected Object /* CompletableFuture | Object */ + resolveField(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + Object fieldWithInfo = resolveFieldWithInfo(executionContext, parameters); + if (fieldWithInfo instanceof CompletableFuture) { + return ((CompletableFuture) fieldWithInfo).thenCompose(FieldValueInfo::getFieldValueFuture); + } else { + return ((FieldValueInfo) fieldWithInfo).getFieldValueObject(); + } } /** @@ -197,28 +377,42 @@ protected CompletableFuture resolveField(ExecutionContext execu * @param executionContext contains the top level execution parameters * @param parameters contains the parameters holding the fields to be executed and source object * - * @return a promise to a {@link FieldValueInfo} + * @return a {@link CompletableFuture} promise to a {@link FieldValueInfo} or a materialised {@link 
FieldValueInfo} * - * @throws NonNullableFieldWasNullException in the {@link FieldValueInfo#getFieldValue()} future if a non null field resolves to a null value + * @throws NonNullableFieldWasNullException in the {@link FieldValueInfo#getFieldValueFuture()} future + * if a nonnull field resolves to a null value */ - protected CompletableFuture resolveFieldWithInfo(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + @SuppressWarnings("unchecked") + protected Object /* CompletableFuture | FieldValueInfo */ + resolveFieldWithInfo(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { GraphQLFieldDefinition fieldDef = getFieldDef(executionContext, parameters, parameters.getField().getSingleField()); Supplier executionStepInfo = FpKit.intraThreadMemoize(() -> createExecutionStepInfo(executionContext, parameters, fieldDef, null)); Instrumentation instrumentation = executionContext.getInstrumentation(); - InstrumentationContext fieldCtx = nonNullCtx(instrumentation.beginField( + InstrumentationContext fieldCtx = nonNullCtx(instrumentation.beginFieldExecution( new InstrumentationFieldParameters(executionContext, executionStepInfo), executionContext.getInstrumentationState() )); - CompletableFuture fetchFieldFuture = fetchField(executionContext, parameters); - CompletableFuture result = fetchFieldFuture.thenApply((fetchedValue) -> - completeField(executionContext, parameters, fetchedValue)); - - CompletableFuture executionResultFuture = result.thenCompose(FieldValueInfo::getFieldValue); + Object fetchedValueObj = fetchField(executionContext, parameters); + if (fetchedValueObj instanceof CompletableFuture) { + CompletableFuture fetchFieldFuture = (CompletableFuture) fetchedValueObj; + CompletableFuture result = fetchFieldFuture.thenApply((fetchedValue) -> + completeField(fieldDef, executionContext, parameters, fetchedValue)); - fieldCtx.onDispatched(executionResultFuture); - 
executionResultFuture.whenComplete(fieldCtx::onCompleted); - return result; + fieldCtx.onDispatched(); + result.whenComplete(fieldCtx::onCompleted); + return result; + } else { + try { + FetchedValue fetchedValue = (FetchedValue) fetchedValueObj; + FieldValueInfo fieldValueInfo = completeField(fieldDef, executionContext, parameters, fetchedValue); + fieldCtx.onDispatched(); + fieldCtx.onCompleted(fetchedValue.getFetchedValue(), null); + return fieldValueInfo; + } catch (Exception e) { + return Async.exceptionallyCompletedFuture(e); + } + } } /** @@ -231,15 +425,27 @@ protected CompletableFuture resolveFieldWithInfo(ExecutionContex * @param executionContext contains the top level execution parameters * @param parameters contains the parameters holding the fields to be executed and source object * - * @return a promise to a fetched object + * @return a promise to a {@link FetchedValue} object or the {@link FetchedValue} itself * * @throws NonNullableFieldWasNullException in the future if a non null field resolves to a null value */ - protected CompletableFuture fetchField(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + protected Object /*CompletableFuture | FetchedValue>*/ + fetchField(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { MergedField field = parameters.getField(); GraphQLObjectType parentType = (GraphQLObjectType) parameters.getExecutionStepInfo().getUnwrappedNonNullType(); GraphQLFieldDefinition fieldDef = getFieldDef(executionContext.getGraphQLSchema(), parentType, field.getSingleField()); - GraphQLCodeRegistry codeRegistry = executionContext.getGraphQLSchema().getCodeRegistry(); + return fetchField(fieldDef, executionContext, parameters); + } + + private Object /*CompletableFuture | FetchedValue>*/ + fetchField(GraphQLFieldDefinition fieldDef, ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + + if (incrementAndCheckMaxNodesExceeded(executionContext)) { + return new 
FetchedValue(null, Collections.emptyList(), null); + } + + MergedField field = parameters.getField(); + GraphQLObjectType parentType = (GraphQLObjectType) parameters.getExecutionStepInfo().getUnwrappedNonNullType(); // if the DF (like PropertyDataFetcher) does not use the arguments or execution step info then dont build any @@ -274,6 +480,8 @@ protected CompletableFuture fetchField(ExecutionContext executionC .queryDirectives(queryDirectives) .build(); }); + + GraphQLCodeRegistry codeRegistry = executionContext.getGraphQLSchema().getCodeRegistry(); DataFetcher dataFetcher = codeRegistry.getDataFetcher(parentType, fieldDef); Instrumentation instrumentation = executionContext.getInstrumentation(); @@ -284,25 +492,37 @@ protected CompletableFuture fetchField(ExecutionContext executionC ); dataFetcher = instrumentation.instrumentDataFetcher(dataFetcher, instrumentationFieldFetchParams, executionContext.getInstrumentationState()); - CompletableFuture fetchedValue = invokeDataFetcher(executionContext, parameters, fieldDef, dataFetchingEnvironment, dataFetcher); - - fetchCtx.onDispatched(fetchedValue); - return fetchedValue - .handle((result, exception) -> { - fetchCtx.onCompleted(result, exception); - if (exception != null) { - return handleFetchingException(dataFetchingEnvironment.get(), exception); - } else { - // we can simply return the fetched value CF and avoid a allocation - return fetchedValue; - } - }) - .thenCompose(Function.identity()) - .thenApply(result -> unboxPossibleDataFetcherResult(executionContext, parameters, result)); - } - - private CompletableFuture invokeDataFetcher(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLFieldDefinition fieldDef, Supplier dataFetchingEnvironment, DataFetcher dataFetcher) { - CompletableFuture fetchedValue; + dataFetcher = executionContext.getDataLoaderDispatcherStrategy().modifyDataFetcher(dataFetcher); + Object fetchedObject = invokeDataFetcher(executionContext, parameters, fieldDef, 
dataFetchingEnvironment, dataFetcher); + executionContext.getDataLoaderDispatcherStrategy().fieldFetched(executionContext, parameters, dataFetcher, fetchedObject); + fetchCtx.onDispatched(); + if (fetchedObject instanceof CompletableFuture) { + @SuppressWarnings("unchecked") + CompletableFuture fetchedValue = (CompletableFuture) fetchedObject; + return fetchedValue + .handle((result, exception) -> { + fetchCtx.onCompleted(result, exception); + if (exception != null) { + return handleFetchingException(dataFetchingEnvironment.get(), parameters, exception); + } else { + // we can simply return the fetched value CF and avoid a allocation + return fetchedValue; + } + }) + .thenCompose(Function.identity()) + .thenApply(result -> unboxPossibleDataFetcherResult(executionContext, parameters, result)); + } else { + fetchCtx.onCompleted(fetchedObject, null); + return unboxPossibleDataFetcherResult(executionContext, parameters, fetchedObject); + } + } + + /* + * ExecutionContext is not used in the method, but the java agent uses it, so it needs to be present + */ + @SuppressWarnings("unused") + private Object invokeDataFetcher(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLFieldDefinition fieldDef, Supplier dataFetchingEnvironment, DataFetcher dataFetcher) { + Object fetchedValue; try { Object fetchedValueRaw; if (dataFetcher instanceof LightDataFetcher) { @@ -310,11 +530,8 @@ private CompletableFuture invokeDataFetcher(ExecutionContext executionCo } else { fetchedValueRaw = dataFetcher.get(dataFetchingEnvironment.get()); } - fetchedValue = Async.toCompletableFuture(fetchedValueRaw); + fetchedValue = Async.toCompletableFutureOrMaterializedObject(fetchedValueRaw); } catch (Exception e) { - if (logNotSafe.isDebugEnabled()) { - logNotSafe.debug(String.format("'%s', field '%s' fetch threw exception", executionContext.getExecutionId(), parameters.getPath()), e); - } fetchedValue = Async.exceptionallyCompletedFuture(e); } return fetchedValue; @@ 
-339,18 +556,11 @@ protected FetchedValue unboxPossibleDataFetcherResult(ExecutionContext execution // if the field returns nothing then they get the context of their parent field localContext = parameters.getLocalContext(); } - return FetchedValue.newFetchedValue() - .fetchedValue(executionContext.getValueUnboxer().unbox(dataFetcherResult.getData())) - .rawFetchedValue(dataFetcherResult.getData()) - .errors(dataFetcherResult.getErrors()) - .localContext(localContext) - .build(); + Object unBoxedValue = executionContext.getValueUnboxer().unbox(dataFetcherResult.getData()); + return new FetchedValue(unBoxedValue, dataFetcherResult.getErrors(), localContext); } else { - return FetchedValue.newFetchedValue() - .fetchedValue(executionContext.getValueUnboxer().unbox(result)) - .rawFetchedValue(result) - .localContext(parameters.getLocalContext()) - .build(); + Object unBoxedValue = executionContext.getValueUnboxer().unbox(result); + return new FetchedValue(unBoxedValue, ImmutableList.of(), parameters.getLocalContext()); } } @@ -366,12 +576,20 @@ private void addExtensionsIfPresent(ExecutionContext executionContext, DataFetch protected CompletableFuture handleFetchingException( DataFetchingEnvironment environment, - Throwable e) { + ExecutionStrategyParameters parameters, + Throwable e + ) { DataFetcherExceptionHandlerParameters handlerParameters = DataFetcherExceptionHandlerParameters.newExceptionParameters() .dataFetchingEnvironment(environment) .exception(e) .build(); + parameters.getDeferredCallContext().onFetchingException( + parameters.getPath(), + parameters.getField().getSingleField().getSourceLocation(), + e + ); + try { return asyncHandleException(dataFetcherExceptionHandler, handlerParameters); } catch (Exception handlerException) { @@ -405,17 +623,23 @@ private CompletableFuture asyncHandleException(DataFetcherExceptionHandle * * @return a {@link FieldValueInfo} * - * @throws NonNullableFieldWasNullException in the {@link FieldValueInfo#getFieldValue()} 
future if a non null field resolves to a null value + * @throws NonNullableFieldWasNullException in the {@link FieldValueInfo#getFieldValueFuture()} future + * if a nonnull field resolves to a null value */ protected FieldValueInfo completeField(ExecutionContext executionContext, ExecutionStrategyParameters parameters, FetchedValue fetchedValue) { Field field = parameters.getField().getSingleField(); GraphQLObjectType parentType = (GraphQLObjectType) parameters.getExecutionStepInfo().getUnwrappedNonNullType(); GraphQLFieldDefinition fieldDef = getFieldDef(executionContext.getGraphQLSchema(), parentType, field); + return completeField(fieldDef, executionContext, parameters, fetchedValue); + } + + private FieldValueInfo completeField(GraphQLFieldDefinition fieldDef, ExecutionContext executionContext, ExecutionStrategyParameters parameters, FetchedValue fetchedValue) { + GraphQLObjectType parentType = (GraphQLObjectType) parameters.getExecutionStepInfo().getUnwrappedNonNullType(); ExecutionStepInfo executionStepInfo = createExecutionStepInfo(executionContext, parameters, fieldDef, parentType); Instrumentation instrumentation = executionContext.getInstrumentation(); InstrumentationFieldCompleteParameters instrumentationParams = new InstrumentationFieldCompleteParameters(executionContext, parameters, () -> executionStepInfo, fetchedValue); - InstrumentationContext ctxCompleteField = nonNullCtx(instrumentation.beginFieldComplete( + InstrumentationContext ctxCompleteField = nonNullCtx(instrumentation.beginFieldCompletion( instrumentationParams, executionContext.getInstrumentationState() )); @@ -428,19 +652,14 @@ protected FieldValueInfo completeField(ExecutionContext executionContext, Execut .nonNullFieldValidator(nonNullableFieldValidator) ); - if (log.isDebugEnabled()) { - log.debug("'{}' completing field '{}'...", executionContext.getExecutionId(), executionStepInfo.getPath()); - } - FieldValueInfo fieldValueInfo = completeValue(executionContext, newParameters); - 
CompletableFuture executionResultFuture = fieldValueInfo.getFieldValue(); - ctxCompleteField.onDispatched(executionResultFuture); + CompletableFuture executionResultFuture = fieldValueInfo.getFieldValueFuture(); + ctxCompleteField.onDispatched(); executionResultFuture.whenComplete(ctxCompleteField::onCompleted); return fieldValueInfo; } - /** * Called to complete a value for a field based on the type of the field. *

@@ -461,18 +680,18 @@ protected FieldValueInfo completeValue(ExecutionContext executionContext, Execut ExecutionStepInfo executionStepInfo = parameters.getExecutionStepInfo(); Object result = executionContext.getValueUnboxer().unbox(parameters.getSource()); GraphQLType fieldType = executionStepInfo.getUnwrappedNonNullType(); - CompletableFuture fieldValue; + Object fieldValue; if (result == null) { - return getFieldValueInfoForNull(executionContext, parameters); + return getFieldValueInfoForNull(parameters); } else if (isList(fieldType)) { return completeValueForList(executionContext, parameters, result); } else if (isScalar(fieldType)) { fieldValue = completeValueForScalar(executionContext, parameters, (GraphQLScalarType) fieldType, result); - return FieldValueInfo.newFieldValueInfo(SCALAR).fieldValue(fieldValue).build(); + return new FieldValueInfo(SCALAR, fieldValue); } else if (isEnum(fieldType)) { fieldValue = completeValueForEnum(executionContext, parameters, (GraphQLEnumType) fieldType, result); - return FieldValueInfo.newFieldValueInfo(ENUM).fieldValue(fieldValue).build(); + return new FieldValueInfo(ENUM, fieldValue); } // when we are here, we have a complex type: Interface, Union or Object @@ -486,38 +705,48 @@ protected FieldValueInfo completeValue(ExecutionContext executionContext, Execut // consider the result to be null and add the error on the context handleUnresolvedTypeProblem(executionContext, parameters, ex); // complete field as null, validating it is nullable - return getFieldValueInfoForNull(executionContext, parameters); + return getFieldValueInfoForNull(parameters); } - return FieldValueInfo.newFieldValueInfo(OBJECT).fieldValue(fieldValue).build(); + return new FieldValueInfo(OBJECT, fieldValue); } private void handleUnresolvedTypeProblem(ExecutionContext context, ExecutionStrategyParameters parameters, UnresolvedTypeException e) { UnresolvedTypeError error = new UnresolvedTypeError(parameters.getPath(), parameters.getExecutionStepInfo(), 
e); - logNotSafe.warn(error.getMessage(), e); context.addError(error); + parameters.getDeferredCallContext().onError(error); } /** * Called to complete a null value. * - * @param executionContext contains the top level execution parameters - * @param parameters contains the parameters holding the fields to be executed and source object + * @param parameters contains the parameters holding the fields to be executed and source object * * @return a {@link FieldValueInfo} * - * @throws NonNullableFieldWasNullException if a non null field resolves to a null value + * @throws NonNullableFieldWasNullException inside a {@link CompletableFuture} if a non null field resolves to a null value */ - private FieldValueInfo getFieldValueInfoForNull(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { - CompletableFuture fieldValue = completeValueForNull(executionContext, parameters); - return FieldValueInfo.newFieldValueInfo(NULL).fieldValue(fieldValue).build(); + private FieldValueInfo getFieldValueInfoForNull(ExecutionStrategyParameters parameters) { + Object fieldValue = completeValueForNull(parameters); + return new FieldValueInfo(NULL, fieldValue); } - protected CompletableFuture completeValueForNull(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { - return Async.tryCatch(() -> { - Object nullValue = parameters.getNonNullFieldValidator().validate(parameters.getPath(), null); - return completedFuture(new ExecutionResultImpl(nullValue, executionContext.getErrors())); - }); + /** + * Called to complete a null value. 
+ * + * @param parameters contains the parameters holding the fields to be executed and source object + * + * @return a null value or a {@link CompletableFuture} exceptionally completed + * + * @throws NonNullableFieldWasNullException inside the {@link CompletableFuture} if a non-null field resolves to a null value + */ + protected Object /* CompletableFuture | Object */ + completeValueForNull(ExecutionStrategyParameters parameters) { + try { + return parameters.getNonNullFieldValidator().validate(parameters.getPath(), null); + } catch (Exception e) { + return Async.exceptionallyCompletedFuture(e); + } } /** @@ -535,10 +764,10 @@ protected FieldValueInfo completeValueForList(ExecutionContext executionContext, try { resultIterable = parameters.getNonNullFieldValidator().validate(parameters.getPath(), resultIterable); } catch (NonNullableFieldWasNullException e) { - return FieldValueInfo.newFieldValueInfo(LIST).fieldValue(exceptionallyCompletedFuture(e)).build(); + return new FieldValueInfo(LIST, exceptionallyCompletedFuture(e)); } if (resultIterable == null) { - return FieldValueInfo.newFieldValueInfo(LIST).fieldValue(completedFuture(new ExecutionResultImpl(null, executionContext.getErrors()))).build(); + return new FieldValueInfo(LIST, null); } return completeValueForList(executionContext, parameters, resultIterable); } @@ -561,13 +790,17 @@ protected FieldValueInfo completeValueForList(ExecutionContext executionContext, InstrumentationFieldCompleteParameters instrumentationParams = new InstrumentationFieldCompleteParameters(executionContext, parameters, () -> executionStepInfo, iterableValues); Instrumentation instrumentation = executionContext.getInstrumentation(); - InstrumentationContext completeListCtx = nonNullCtx(instrumentation.beginFieldListComplete( + InstrumentationContext completeListCtx = nonNullCtx(instrumentation.beginFieldListCompletion( instrumentationParams, executionContext.getInstrumentationState() )); List fieldValueInfos = new 
ArrayList<>(size.orElse(1)); int index = 0; for (Object item : iterableValues) { + if (incrementAndCheckMaxNodesExceeded(executionContext)) { + return new FieldValueInfo(NULL, null, fieldValueInfos); + } + ResultPath indexedPath = parameters.getPath().segment(index); ExecutionStepInfo stepInfoForListElement = executionStepInfoFactory.newExecutionStepInfoForListElement(executionStepInfo, indexedPath); @@ -587,32 +820,54 @@ protected FieldValueInfo completeValueForList(ExecutionContext executionContext, index++; } - CompletableFuture> resultsFuture = Async.each(fieldValueInfos, FieldValueInfo::getFieldValue); - - CompletableFuture overallResult = new CompletableFuture<>(); - completeListCtx.onDispatched(overallResult); + Object listResults = Async.eachPolymorphic(fieldValueInfos, FieldValueInfo::getFieldValueObject); + Object listOrPromiseToList; + if (listResults instanceof CompletableFuture) { + @SuppressWarnings("unchecked") + CompletableFuture> resultsFuture = (CompletableFuture>) listResults; + CompletableFuture overallResult = new CompletableFuture<>(); + completeListCtx.onDispatched(); + overallResult.whenComplete(completeListCtx::onCompleted); + + resultsFuture.whenComplete((results, exception) -> { + if (exception != null) { + handleValueException(overallResult, exception, executionContext); + return; + } + List completedResults = new ArrayList<>(results.size()); + completedResults.addAll(results); + overallResult.complete(completedResults); + }); + listOrPromiseToList = overallResult; + } else { + completeListCtx.onCompleted(listResults, null); + listOrPromiseToList = listResults; + } + return new FieldValueInfo(LIST, listOrPromiseToList, fieldValueInfos); + } - resultsFuture.whenComplete((results, exception) -> { - if (exception != null) { - ExecutionResult executionResult = handleNonNullException(executionContext, overallResult, exception); - completeListCtx.onCompleted(executionResult, exception); - return; + protected void 
handleValueException(CompletableFuture overallResult, Throwable e, ExecutionContext executionContext) { + Throwable underlyingException = e; + if (e instanceof CompletionException) { + underlyingException = e.getCause(); + } + if (underlyingException instanceof NonNullableFieldWasNullException) { + assertNonNullFieldPrecondition((NonNullableFieldWasNullException) underlyingException, overallResult); + if (!overallResult.isDone()) { + overallResult.complete(null); } - List completedResults = new ArrayList<>(results.size()); - for (ExecutionResult completedValue : results) { - completedResults.add(completedValue.getData()); + } else if (underlyingException instanceof AbortExecutionException) { + AbortExecutionException abortException = (AbortExecutionException) underlyingException; + executionContext.addError(abortException); + if (!overallResult.isDone()) { + overallResult.complete(null); } - ExecutionResultImpl executionResult = new ExecutionResultImpl(completedResults, executionContext.getErrors()); - overallResult.complete(executionResult); - }); - overallResult.whenComplete(completeListCtx::onCompleted); - - return FieldValueInfo.newFieldValueInfo(LIST) - .fieldValue(overallResult) - .fieldValueInfos(fieldValueInfos) - .build(); + } else { + overallResult.completeExceptionally(e); + } } + /** * Called to turn an object into a scalar value according to the {@link GraphQLScalarType} by asking that scalar type to coerce the object * into a valid value @@ -622,9 +877,10 @@ protected FieldValueInfo completeValueForList(ExecutionContext executionContext, * @param scalarType the type of the scalar * @param result the result to be coerced * - * @return a promise to an {@link ExecutionResult} + * @return a materialized scalar value or exceptionally completed {@link CompletableFuture} if there is a problem */ - protected CompletableFuture completeValueForScalar(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLScalarType scalarType, Object 
result) { + protected Object /* CompletableFuture | Object */ + completeValueForScalar(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLScalarType scalarType, Object result) { Object serialized; try { serialized = scalarType.getCoercing().serialize(result, executionContext.getGraphQLContext(), executionContext.getLocale()); @@ -637,20 +893,21 @@ protected CompletableFuture completeValueForScalar(ExecutionCon } catch (NonNullableFieldWasNullException e) { return exceptionallyCompletedFuture(e); } - return completedFuture(new ExecutionResultImpl(serialized, executionContext.getErrors())); + return serialized; } /** - * Called to turn an object into a enum value according to the {@link GraphQLEnumType} by asking that enum type to coerce the object into a valid value + * Called to turn an object into an enum value according to the {@link GraphQLEnumType} by asking that enum type to coerce the object into a valid value * * @param executionContext contains the top level execution parameters * @param parameters contains the parameters holding the fields to be executed and source object * @param enumType the type of the enum * @param result the result to be coerced * - * @return a promise to an {@link ExecutionResult} + * @return a materialized enum value or exceptionally completed {@link CompletableFuture} if there is a problem */ - protected CompletableFuture completeValueForEnum(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLEnumType enumType, Object result) { + protected Object /* CompletableFuture | Object */ + completeValueForEnum(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLEnumType enumType, Object result) { Object serialized; try { serialized = enumType.serialize(result, executionContext.getGraphQLContext(), executionContext.getLocale()); @@ -662,7 +919,7 @@ protected CompletableFuture completeValueForEnum(ExecutionConte } catch 
(NonNullableFieldWasNullException e) { return exceptionallyCompletedFuture(e); } - return completedFuture(new ExecutionResultImpl(serialized, executionContext.getErrors())); + return serialized; } /** @@ -673,9 +930,10 @@ protected CompletableFuture completeValueForEnum(ExecutionConte * @param resolvedObjectType the resolved object type * @param result the result to be coerced * - * @return a promise to an {@link ExecutionResult} + * @return a {@link CompletableFuture} promise to a map of object field values or a materialized map of object field values */ - protected CompletableFuture completeValueForObject(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLObjectType resolvedObjectType, Object result) { + protected Object /* CompletableFuture> | Map */ + completeValueForObject(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLObjectType resolvedObjectType, Object result) { ExecutionStepInfo executionStepInfo = parameters.getExecutionStepInfo(); FieldCollectorParameters collectorParameters = newParameters() @@ -683,9 +941,16 @@ protected CompletableFuture completeValueForObject(ExecutionCon .objectType(resolvedObjectType) .fragments(executionContext.getFragmentsByName()) .variables(executionContext.getCoercedVariables().toMap()) + .graphQLContext(executionContext.getGraphQLContext()) .build(); - MergedSelectionSet subFields = fieldCollector.collectFields(collectorParameters, parameters.getField()); + MergedSelectionSet subFields = fieldCollector.collectFields( + collectorParameters, + parameters.getField(), + Optional.ofNullable(executionContext.getGraphQLContext()) + .map(graphqlContext -> (Boolean) graphqlContext.get(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT)) + .orElse(false) + ); ExecutionStepInfo newExecutionStepInfo = executionStepInfo.changeTypeWithPreservedNonNull(resolvedObjectType); NonNullableFieldValidator nonNullableFieldValidator = new NonNullableFieldValidator(executionContext, 
newExecutionStepInfo); @@ -698,16 +963,15 @@ protected CompletableFuture completeValueForObject(ExecutionCon ); // Calling this from the executionContext to ensure we shift back from mutation strategy to the query strategy. - - return executionContext.getQueryStrategy().execute(executionContext, newParameters); + return executionContext.getQueryStrategy().executeObject(executionContext, newParameters); } @SuppressWarnings("SameReturnValue") private Object handleCoercionProblem(ExecutionContext context, ExecutionStrategyParameters parameters, CoercingSerializeException e) { SerializationError error = new SerializationError(parameters.getPath(), e); - logNotSafe.warn(error.getMessage(), e); context.addError(error); + parameters.getDeferredCallContext().onError(error); return null; } @@ -720,23 +984,41 @@ protected GraphQLObjectType resolveType(ExecutionContext executionContext, Execu return resolvedType.resolveType(executionContext, parameters.getField(), parameters.getSource(), parameters.getExecutionStepInfo(), fieldType, parameters.getLocalContext()); } - protected Iterable toIterable(ExecutionContext context, ExecutionStrategyParameters parameters, Object result) { if (FpKit.isIterable(result)) { return FpKit.toIterable(result); } - handleTypeMismatchProblem(context, parameters, result); + handleTypeMismatchProblem(context, parameters); return null; } - - private void handleTypeMismatchProblem(ExecutionContext context, ExecutionStrategyParameters parameters, Object result) { + private void handleTypeMismatchProblem(ExecutionContext context, ExecutionStrategyParameters parameters) { TypeMismatchError error = new TypeMismatchError(parameters.getPath(), parameters.getExecutionStepInfo().getUnwrappedNonNullType()); - logNotSafe.warn("{} got {}", error.getMessage(), result.getClass()); context.addError(error); + + parameters.getDeferredCallContext().onError(error); } + /** + * This has a side effect of incrementing the number of nodes returned and also checks + * if 
max nodes were exceeded for this request. + * + * @param executionContext the execution context in play + * + * @return true if max nodes were exceeded + */ + private boolean incrementAndCheckMaxNodesExceeded(ExecutionContext executionContext) { + int resultNodesCount = executionContext.getResultNodesInfo().incrementAndGetResultNodesCount(); + Integer maxNodes; + if ((maxNodes = executionContext.getGraphQLContext().get(MAX_RESULT_NODES)) != null) { + if (resultNodesCount > maxNodes) { + executionContext.getResultNodesInfo().maxResultNodesExceeded(); + return true; + } + } + return false; + } /** * Called to discover the field definition give the current parameters and the AST {@link Field} @@ -762,7 +1044,7 @@ protected GraphQLFieldDefinition getFieldDef(ExecutionContext executionContext, * @return a {@link GraphQLFieldDefinition} */ protected GraphQLFieldDefinition getFieldDef(GraphQLSchema schema, GraphQLObjectType parentType, Field field) { - return Introspection.getFieldDef(schema, parentType, field.getName()); + return Introspection.getFieldDefinition(schema, parentType, field.getName()); } /** @@ -816,7 +1098,6 @@ protected ExecutionResult handleNonNullException(ExecutionContext executionConte return executionResult; } - /** * Builds the type info hierarchy for the current field * @@ -835,20 +1116,12 @@ protected ExecutionStepInfo createExecutionStepInfo(ExecutionContext executionCo ExecutionStepInfo parentStepInfo = parameters.getExecutionStepInfo(); GraphQLOutputType fieldType = fieldDefinition.getType(); List fieldArgDefs = fieldDefinition.getArguments(); - Supplier> argumentValues = ImmutableKit::emptyMap; + Supplier> argumentValues = ImmutableMapWithNullValues::emptyMap; // // no need to create args at all if there are none on the field def // if (!fieldArgDefs.isEmpty()) { - List fieldArgs = field.getArguments(); - GraphQLCodeRegistry codeRegistry = executionContext.getGraphQLSchema().getCodeRegistry(); - Supplier> argValuesSupplier = () -> 
ValuesResolver.getArgumentValues(codeRegistry, - fieldArgDefs, - fieldArgs, - executionContext.getCoercedVariables(), - executionContext.getGraphQLContext(), - executionContext.getLocale()); - argumentValues = FpKit.intraThreadMemoize(argValuesSupplier); + argumentValues = getArgumentValues(executionContext, fieldArgDefs, field.getArguments()); } @@ -863,21 +1136,24 @@ protected ExecutionStepInfo createExecutionStepInfo(ExecutionContext executionCo .build(); } + @NotNull + private static Supplier> getArgumentValues(ExecutionContext executionContext, + List fieldArgDefs, + List fieldArgs) { + Supplier> argumentValues; + GraphQLCodeRegistry codeRegistry = executionContext.getGraphQLSchema().getCodeRegistry(); + Supplier> argValuesSupplier = () -> { + Map resolvedValues = ValuesResolver.getArgumentValues(codeRegistry, + fieldArgDefs, + fieldArgs, + executionContext.getCoercedVariables(), + executionContext.getGraphQLContext(), + executionContext.getLocale()); - @Internal - public static String mkNameForPath(Field currentField) { - return mkNameForPath(Collections.singletonList(currentField)); - } - - @Internal - public static String mkNameForPath(MergedField mergedField) { - return mkNameForPath(mergedField.getFields()); + return ImmutableMapWithNullValues.copyOf(resolvedValues); + }; + argumentValues = FpKit.intraThreadMemoize(argValuesSupplier); + return argumentValues; } - - @Internal - public static String mkNameForPath(List currentField) { - Field field = currentField.get(0); - return field.getResultKey(); - } } diff --git a/src/main/java/graphql/execution/ExecutionStrategyParameters.java b/src/main/java/graphql/execution/ExecutionStrategyParameters.java index b413e4321a..e1df7bb363 100644 --- a/src/main/java/graphql/execution/ExecutionStrategyParameters.java +++ b/src/main/java/graphql/execution/ExecutionStrategyParameters.java @@ -1,7 +1,7 @@ package graphql.execution; -import graphql.Assert; import graphql.PublicApi; +import 
graphql.execution.incremental.DeferredCallContext; import java.util.function.Consumer; @@ -20,6 +20,7 @@ public class ExecutionStrategyParameters { private final ResultPath path; private final MergedField currentField; private final ExecutionStrategyParameters parent; + private final DeferredCallContext deferredCallContext; private ExecutionStrategyParameters(ExecutionStepInfo executionStepInfo, Object source, @@ -28,7 +29,8 @@ private ExecutionStrategyParameters(ExecutionStepInfo executionStepInfo, NonNullableFieldValidator nonNullableFieldValidator, ResultPath path, MergedField currentField, - ExecutionStrategyParameters parent) { + ExecutionStrategyParameters parent, + DeferredCallContext deferredCallContext) { this.executionStepInfo = assertNotNull(executionStepInfo, () -> "executionStepInfo is null"); this.localContext = localContext; @@ -38,6 +40,7 @@ private ExecutionStrategyParameters(ExecutionStepInfo executionStepInfo, this.path = path; this.currentField = currentField; this.parent = parent; + this.deferredCallContext = deferredCallContext; } public ExecutionStepInfo getExecutionStepInfo() { @@ -68,6 +71,10 @@ public ExecutionStrategyParameters getParent() { return parent; } + public DeferredCallContext getDeferredCallContext() { + return deferredCallContext; + } + /** * This returns the current field in its query representations. 
* @@ -106,6 +113,7 @@ public static class Builder { ResultPath path = ResultPath.rootPath(); MergedField currentField; ExecutionStrategyParameters parent; + DeferredCallContext deferredCallContext; /** * @see ExecutionStrategyParameters#newParameters() @@ -123,6 +131,7 @@ private Builder(ExecutionStrategyParameters oldParameters) { this.fields = oldParameters.fields; this.nonNullableFieldValidator = oldParameters.nonNullableFieldValidator; this.currentField = oldParameters.currentField; + this.deferredCallContext = oldParameters.deferredCallContext; this.path = oldParameters.path; this.parent = oldParameters.parent; } @@ -158,7 +167,7 @@ public Builder localContext(Object localContext) { } public Builder nonNullFieldValidator(NonNullableFieldValidator nonNullableFieldValidator) { - this.nonNullableFieldValidator = Assert.assertNotNull(nonNullableFieldValidator, () -> "requires a NonNullValidator"); + this.nonNullableFieldValidator = assertNotNull(nonNullableFieldValidator, () -> "requires a NonNullValidator"); return this; } @@ -172,9 +181,16 @@ public Builder parent(ExecutionStrategyParameters parent) { return this; } + public Builder deferredCallContext(DeferredCallContext deferredCallContext) { + this.deferredCallContext = deferredCallContext; + return this; + } public ExecutionStrategyParameters build() { - return new ExecutionStrategyParameters(executionStepInfo, source, localContext, fields, nonNullableFieldValidator, path, currentField, parent); + if (deferredCallContext == null) { + deferredCallContext = new DeferredCallContext(); + } + return new ExecutionStrategyParameters(executionStepInfo, source, localContext, fields, nonNullableFieldValidator, path, currentField, parent, deferredCallContext); } } } diff --git a/src/main/java/graphql/execution/FetchedValue.java b/src/main/java/graphql/execution/FetchedValue.java index 28d2ce6da6..0a643f8b71 100644 --- a/src/main/java/graphql/execution/FetchedValue.java +++ 
b/src/main/java/graphql/execution/FetchedValue.java @@ -6,7 +6,6 @@ import graphql.execution.instrumentation.parameters.InstrumentationFieldCompleteParameters; import java.util.List; -import java.util.function.Consumer; /** * Note: This is returned by {@link InstrumentationFieldCompleteParameters#getFetchedValue()} @@ -15,14 +14,12 @@ @PublicApi public class FetchedValue { private final Object fetchedValue; - private final Object rawFetchedValue; private final Object localContext; private final ImmutableList errors; - private FetchedValue(Object fetchedValue, Object rawFetchedValue, ImmutableList errors, Object localContext) { + FetchedValue(Object fetchedValue, List errors, Object localContext) { this.fetchedValue = fetchedValue; - this.rawFetchedValue = rawFetchedValue; - this.errors = errors; + this.errors = ImmutableList.copyOf(errors); this.localContext = localContext; } @@ -33,10 +30,6 @@ public Object getFetchedValue() { return fetchedValue; } - public Object getRawFetchedValue() { - return rawFetchedValue; - } - public List getErrors() { return errors; } @@ -45,64 +38,13 @@ public Object getLocalContext() { return localContext; } - public FetchedValue transform(Consumer builderConsumer) { - Builder builder = newFetchedValue(this); - builderConsumer.accept(builder); - return builder.build(); - } - @Override public String toString() { return "FetchedValue{" + "fetchedValue=" + fetchedValue + - ", rawFetchedValue=" + rawFetchedValue + ", localContext=" + localContext + ", errors=" + errors + '}'; } - public static Builder newFetchedValue() { - return new Builder(); - } - - public static Builder newFetchedValue(FetchedValue otherValue) { - return new Builder() - .fetchedValue(otherValue.getFetchedValue()) - .rawFetchedValue(otherValue.getRawFetchedValue()) - .errors(otherValue.getErrors()) - .localContext(otherValue.getLocalContext()) - ; - } - - public static class Builder { - - private Object fetchedValue; - private Object rawFetchedValue; - private Object 
localContext; - private ImmutableList errors = ImmutableList.of(); - - public Builder fetchedValue(Object fetchedValue) { - this.fetchedValue = fetchedValue; - return this; - } - - public Builder rawFetchedValue(Object rawFetchedValue) { - this.rawFetchedValue = rawFetchedValue; - return this; - } - - public Builder localContext(Object localContext) { - this.localContext = localContext; - return this; - } - - public Builder errors(List errors) { - this.errors = ImmutableList.copyOf(errors); - return this; - } - - public FetchedValue build() { - return new FetchedValue(fetchedValue, rawFetchedValue, errors, localContext); - } - } } \ No newline at end of file diff --git a/src/main/java/graphql/execution/FieldCollector.java b/src/main/java/graphql/execution/FieldCollector.java index a6f1310a8c..93d58f97ab 100644 --- a/src/main/java/graphql/execution/FieldCollector.java +++ b/src/main/java/graphql/execution/FieldCollector.java @@ -3,6 +3,8 @@ import graphql.Internal; import graphql.execution.conditional.ConditionalNodes; +import graphql.execution.incremental.DeferredExecution; +import graphql.execution.incremental.IncrementalUtils; import graphql.language.Field; import graphql.language.FragmentDefinition; import graphql.language.FragmentSpread; @@ -25,7 +27,7 @@ /** * A field collector can iterate over field selection sets and build out the sub fields that have been selected, - * expanding named and inline fragments as it goes.s + * expanding named and inline fragments as it goes. 
*/ @Internal public class FieldCollector { @@ -33,13 +35,17 @@ public class FieldCollector { private final ConditionalNodes conditionalNodes = new ConditionalNodes(); public MergedSelectionSet collectFields(FieldCollectorParameters parameters, MergedField mergedField) { + return collectFields(parameters, mergedField, false); + } + + public MergedSelectionSet collectFields(FieldCollectorParameters parameters, MergedField mergedField, boolean incrementalSupport) { Map subFields = new LinkedHashMap<>(); Set visitedFragments = new LinkedHashSet<>(); for (Field field : mergedField.getFields()) { if (field.getSelectionSet() == null) { continue; } - this.collectFields(parameters, field.getSelectionSet(), visitedFragments, subFields); + this.collectFields(parameters, field.getSelectionSet(), visitedFragments, subFields, null, incrementalSupport); } return newMergedSelectionSet().subFields(subFields).build(); } @@ -53,27 +59,31 @@ public MergedSelectionSet collectFields(FieldCollectorParameters parameters, Mer * @return a map of the sub field selections */ public MergedSelectionSet collectFields(FieldCollectorParameters parameters, SelectionSet selectionSet) { + return collectFields(parameters, selectionSet, false); + } + + public MergedSelectionSet collectFields(FieldCollectorParameters parameters, SelectionSet selectionSet, boolean incrementalSupport) { Map subFields = new LinkedHashMap<>(); Set visitedFragments = new LinkedHashSet<>(); - this.collectFields(parameters, selectionSet, visitedFragments, subFields); + this.collectFields(parameters, selectionSet, visitedFragments, subFields, null, incrementalSupport); return newMergedSelectionSet().subFields(subFields).build(); } - private void collectFields(FieldCollectorParameters parameters, SelectionSet selectionSet, Set visitedFragments, Map fields) { + private void collectFields(FieldCollectorParameters parameters, SelectionSet selectionSet, Set visitedFragments, Map fields, DeferredExecution deferredExecution, boolean 
incrementalSupport) { for (Selection selection : selectionSet.getSelections()) { if (selection instanceof Field) { - collectField(parameters, fields, (Field) selection); + collectField(parameters, fields, (Field) selection, deferredExecution); } else if (selection instanceof InlineFragment) { - collectInlineFragment(parameters, visitedFragments, fields, (InlineFragment) selection); + collectInlineFragment(parameters, visitedFragments, fields, (InlineFragment) selection, incrementalSupport); } else if (selection instanceof FragmentSpread) { - collectFragmentSpread(parameters, visitedFragments, fields, (FragmentSpread) selection); + collectFragmentSpread(parameters, visitedFragments, fields, (FragmentSpread) selection, incrementalSupport); } } } - private void collectFragmentSpread(FieldCollectorParameters parameters, Set visitedFragments, Map fields, FragmentSpread fragmentSpread) { + private void collectFragmentSpread(FieldCollectorParameters parameters, Set visitedFragments, Map fields, FragmentSpread fragmentSpread, boolean incrementalSupport) { if (visitedFragments.contains(fragmentSpread.getName())) { return; } @@ -95,10 +105,17 @@ private void collectFragmentSpread(FieldCollectorParameters parameters, Set visitedFragments, Map fields, InlineFragment inlineFragment) { + private void collectInlineFragment(FieldCollectorParameters parameters, Set visitedFragments, Map fields, InlineFragment inlineFragment, boolean incrementalSupport) { if (!conditionalNodes.shouldInclude(inlineFragment, parameters.getVariables(), parameters.getGraphQLSchema(), @@ -106,10 +123,17 @@ private void collectInlineFragment(FieldCollectorParameters parameters, Set fields, Field field) { + private void collectField(FieldCollectorParameters parameters, Map fields, Field field, DeferredExecution deferredExecution) { if (!conditionalNodes.shouldInclude(field, parameters.getVariables(), parameters.getGraphQLSchema(), @@ -119,9 +143,12 @@ private void collectField(FieldCollectorParameters 
parameters, Map builder.addField(field))); + fields.put(name, curFields.transform(builder -> builder + .addField(field) + .addDeferredExecution(deferredExecution)) + ); } else { - fields.put(name, MergedField.newMergedField(field).build()); + fields.put(name, MergedField.newSingletonMergedField(field, deferredExecution)); } } diff --git a/src/main/java/graphql/execution/FieldValueInfo.java b/src/main/java/graphql/execution/FieldValueInfo.java index 168ffab735..283cad42c6 100644 --- a/src/main/java/graphql/execution/FieldValueInfo.java +++ b/src/main/java/graphql/execution/FieldValueInfo.java @@ -1,14 +1,25 @@ package graphql.execution; +import com.google.common.collect.ImmutableList; import graphql.ExecutionResult; +import graphql.ExecutionResultImpl; import graphql.PublicApi; -import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import static graphql.Assert.assertNotNull; +/** + * The {@link FieldValueInfo} holds the type of field that was fetched and completed along with the completed value. + *

+ * A field value is considered when its is both fetch via a {@link graphql.schema.DataFetcher} to a raw value, and then + * it is serialized into scalar or enum or if it's an object type, it is completed as an object given its field sub selection + *

+ * The {@link #getFieldValueObject()} method returns either a materialized value or a {@link CompletableFuture} + * promise to a materialized value. Simple in-memory values will tend to be materialized, while complicated + * values might need a call to a database or other systems will tend to be {@link CompletableFuture} promises. + */ @PublicApi public class FieldValueInfo { @@ -18,73 +29,87 @@ public enum CompleteValueType { NULL, SCALAR, ENUM - } private final CompleteValueType completeValueType; - private final CompletableFuture fieldValue; + private final Object /* CompletableFuture | Object */ fieldValueObject; private final List fieldValueInfos; - private FieldValueInfo(CompleteValueType completeValueType, CompletableFuture fieldValue, List fieldValueInfos) { - assertNotNull(fieldValueInfos, () -> "fieldValueInfos can't be null"); + public FieldValueInfo(CompleteValueType completeValueType, Object fieldValueObject) { + this(completeValueType, fieldValueObject, ImmutableList.of()); + } + + public FieldValueInfo(CompleteValueType completeValueType, Object fieldValueObject, List fieldValueInfos) { + assertNotNull(fieldValueInfos, "fieldValueInfos can't be null"); this.completeValueType = completeValueType; - this.fieldValue = fieldValue; + this.fieldValueObject = fieldValueObject; this.fieldValueInfos = fieldValueInfos; } + /** + * This is an enum that represents the type of field value that was completed for a field + * + * @return the type of field value + */ public CompleteValueType getCompleteValueType() { return completeValueType; } + /** + * This value can be either an object that is materialized or a {@link CompletableFuture} promise to a value + * + * @return either an object that is materialized or a {@link CompletableFuture} promise to a value + */ + public Object /* CompletableFuture | Object */ getFieldValueObject() { + return fieldValueObject; + } + + /** + * This returns the value in {@link CompletableFuture} form. 
If it is already a {@link CompletableFuture} it is returned + * directly, otherwise the materialized value is wrapped in a {@link CompletableFuture} and returned + * + * @return a {@link CompletableFuture} promise to the value + */ + public CompletableFuture getFieldValueFuture() { + return Async.toCompletableFuture(fieldValueObject); + } + + /** + * Kept for legacy reasons - this method is no longer sensible and is no longer used by the graphql-java engine + * and is kept only for backwards compatible API reasons. + * + * @return a promise to the {@link ExecutionResult} that wraps the field value. + */ + @Deprecated(since = "2023-09-11") public CompletableFuture getFieldValue() { - return fieldValue; + return getFieldValueFuture().thenApply(fv -> ExecutionResultImpl.newExecutionResult().data(fv).build()); + } + + /** + * @return true if the value is a {@link CompletableFuture} promise to a value + */ + public boolean isFutureValue() { + return fieldValueObject instanceof CompletableFuture; } + /** + * When the {@link #getCompleteValueType()} is {@link CompleteValueType#LIST} this holds the list + * of completed values inside that list object. 
+ * + * @return the list of completed field values inside a list + */ public List getFieldValueInfos() { return fieldValueInfos; } - public static Builder newFieldValueInfo(CompleteValueType completeValueType) { - return new Builder(completeValueType); - } @Override public String toString() { return "FieldValueInfo{" + "completeValueType=" + completeValueType + - ", fieldValue=" + fieldValue + + ", fieldValueObject=" + fieldValueObject + ", fieldValueInfos=" + fieldValueInfos + '}'; } - @SuppressWarnings("unused") - public static class Builder { - private CompleteValueType completeValueType; - private CompletableFuture executionResultFuture; - private List listInfos = new ArrayList<>(); - - public Builder(CompleteValueType completeValueType) { - this.completeValueType = completeValueType; - } - - public Builder completeValueType(CompleteValueType completeValueType) { - this.completeValueType = completeValueType; - return this; - } - - public Builder fieldValue(CompletableFuture executionResultFuture) { - this.executionResultFuture = executionResultFuture; - return this; - } - - public Builder fieldValueInfos(List listInfos) { - assertNotNull(listInfos, () -> "fieldValueInfos can't be null"); - this.listInfos = listInfos; - return this; - } - - public FieldValueInfo build() { - return new FieldValueInfo(completeValueType, executionResultFuture, listInfos); - } - } } \ No newline at end of file diff --git a/src/main/java/graphql/execution/MergedField.java b/src/main/java/graphql/execution/MergedField.java index 2ce672bb5b..e66afb63f8 100644 --- a/src/main/java/graphql/execution/MergedField.java +++ b/src/main/java/graphql/execution/MergedField.java @@ -1,10 +1,13 @@ package graphql.execution; import com.google.common.collect.ImmutableList; +import graphql.ExperimentalApi; import graphql.PublicApi; +import graphql.execution.incremental.DeferredExecution; import graphql.language.Argument; import graphql.language.Field; +import javax.annotation.Nullable; import 
java.util.List; import java.util.Objects; import java.util.function.Consumer; @@ -61,11 +64,17 @@ public class MergedField { private final ImmutableList fields; private final Field singleField; + private final ImmutableList deferredExecutions; - private MergedField(ImmutableList fields) { + private MergedField(ImmutableList fields, ImmutableList deferredExecutions) { assertNotEmpty(fields); this.fields = fields; this.singleField = fields.get(0); + this.deferredExecutions = deferredExecutions; + } + + private MergedField(Field field, DeferredExecution deferredExecution) { + this(ImmutableList.of(field), deferredExecution == null ? ImmutableList.of() : ImmutableList.of(deferredExecution)); } /** @@ -120,6 +129,16 @@ public List getFields() { return fields; } + /** + * Get a list of all {@link DeferredExecution}s that this field is part of + * + * @return all defer executions. + */ + @ExperimentalApi + public List getDeferredExecutions() { + return deferredExecutions; + } + public static Builder newMergedField() { return new Builder(); } @@ -132,6 +151,10 @@ public static Builder newMergedField(List fields) { return new Builder().fields(fields); } + static MergedField newSingletonMergedField(Field field, DeferredExecution deferredExecution) { + return new MergedField(field, deferredExecution); + } + public MergedField transform(Consumer builderConsumer) { Builder builder = new Builder(this); builderConsumer.accept(builder); @@ -141,12 +164,14 @@ public MergedField transform(Consumer builderConsumer) { public static class Builder { private final ImmutableList.Builder fields = new ImmutableList.Builder<>(); + private final ImmutableList.Builder deferredExecutions = new ImmutableList.Builder<>(); private Builder() { } private Builder(MergedField existing) { fields.addAll(existing.getFields()); + deferredExecutions.addAll(existing.deferredExecutions); } public Builder fields(List fields) { @@ -159,8 +184,20 @@ public Builder addField(Field field) { return this; } + public 
Builder addDeferredExecutions(List deferredExecutions) { + this.deferredExecutions.addAll(deferredExecutions); + return this; + } + + public Builder addDeferredExecution(@Nullable DeferredExecution deferredExecution) { + if(deferredExecution != null) { + this.deferredExecutions.add(deferredExecution); + } + return this; + } + public MergedField build() { - return new MergedField(fields.build()); + return new MergedField(fields.build(), deferredExecutions.build()); } } diff --git a/src/main/java/graphql/execution/MergedSelectionSet.java b/src/main/java/graphql/execution/MergedSelectionSet.java index 321a82c7ec..8f279f05a6 100644 --- a/src/main/java/graphql/execution/MergedSelectionSet.java +++ b/src/main/java/graphql/execution/MergedSelectionSet.java @@ -13,10 +13,12 @@ @PublicApi public class MergedSelectionSet { - private final ImmutableMap subFields; + private final Map subFields; + private final List keys; - private MergedSelectionSet(Map subFields) { - this.subFields = ImmutableMap.copyOf(Assert.assertNotNull(subFields)); + protected MergedSelectionSet(Map subFields) { + this.subFields = subFields == null ? 
ImmutableMap.of() : subFields; + this.keys = ImmutableList.copyOf(this.subFields.keySet()); } public Map getSubFields() { @@ -40,7 +42,7 @@ public MergedField getSubField(String key) { } public List getKeys() { - return ImmutableList.copyOf(keySet()); + return keys; } public boolean isEmpty() { @@ -52,10 +54,10 @@ public static Builder newMergedSelectionSet() { } public static class Builder { - private Map subFields = ImmutableMap.of(); - private Builder() { + private Map subFields; + private Builder() { } public Builder subFields(Map subFields) { diff --git a/src/main/java/graphql/execution/ResultNodesInfo.java b/src/main/java/graphql/execution/ResultNodesInfo.java new file mode 100644 index 0000000000..afc366f6be --- /dev/null +++ b/src/main/java/graphql/execution/ResultNodesInfo.java @@ -0,0 +1,55 @@ +package graphql.execution; + +import graphql.Internal; +import graphql.PublicApi; + +import java.util.concurrent.atomic.AtomicInteger; + +/** + * This class is used to track the number of result nodes that have been created during execution. + * After each execution the GraphQLContext contains a ResultNodeInfo object under the key {@link ResultNodesInfo#RESULT_NODES_INFO} + *

+ * The number of result can be limited (and should be for security reasons) by setting the maximum number of result nodes + * in the GraphQLContext under the key {@link ResultNodesInfo#MAX_RESULT_NODES} to an Integer + *

+ */ +@PublicApi +public class ResultNodesInfo { + + public static final String MAX_RESULT_NODES = "__MAX_RESULT_NODES"; + public static final String RESULT_NODES_INFO = "__RESULT_NODES_INFO"; + + private volatile boolean maxResultNodesExceeded = false; + private final AtomicInteger resultNodesCount = new AtomicInteger(0); + + @Internal + public int incrementAndGetResultNodesCount() { + return resultNodesCount.incrementAndGet(); + } + + @Internal + public void maxResultNodesExceeded() { + this.maxResultNodesExceeded = true; + } + + /** + * The number of result nodes created. + * Note: this can be higher than max result nodes because + * a each node that exceeds the number of max nodes is set to null, + * but still is a result node (with value null) + * + * @return number of result nodes created + */ + public int getResultNodesCount() { + return resultNodesCount.get(); + } + + /** + * If the number of result nodes has exceeded the maximum allowed numbers. + * + * @return true if the number of result nodes has exceeded the maximum allowed numbers + */ + public boolean isMaxResultNodesExceeded() { + return maxResultNodesExceeded; + } +} diff --git a/src/main/java/graphql/execution/ResultPath.java b/src/main/java/graphql/execution/ResultPath.java index 7f65379ade..472b5fa529 100644 --- a/src/main/java/graphql/execution/ResultPath.java +++ b/src/main/java/graphql/execution/ResultPath.java @@ -1,7 +1,6 @@ package graphql.execution; import com.google.common.collect.ImmutableList; -import graphql.Assert; import graphql.AssertException; import graphql.PublicApi; import graphql.collect.ImmutableKit; @@ -112,6 +111,7 @@ public ResultPath getParent() { * Parses an execution path from the provided path string in the format /segment1/segment2[index]/segmentN * * @param pathString the path string + * * @return a parsed execution path */ public static ResultPath parse(String pathString) { @@ -140,6 +140,7 @@ public static ResultPath parse(String pathString) { * This will create an 
execution path from the list of objects * * @param objects the path objects + * * @return a new execution path */ public static ResultPath fromList(List objects) { @@ -163,6 +164,7 @@ private static String mkErrMsg() { * Takes the current path and adds a new segment to it, returning a new path * * @param segment the string path segment to add + * * @return a new path containing that segment */ public ResultPath segment(String segment) { @@ -173,6 +175,7 @@ public ResultPath segment(String segment) { * Takes the current path and adds a new segment to it, returning a new path * * @param segment the int path segment to add + * * @return a new path containing that segment */ public ResultPath segment(int segment) { @@ -196,10 +199,11 @@ public ResultPath dropSegment() { * equals "/a/b[9]" * * @param segment the integer segment to use + * * @return a new path with the last segment replaced */ public ResultPath replaceSegment(int segment) { - Assert.assertTrue(!ROOT_PATH.equals(this), () -> "You MUST not call this with the root path"); + assertTrue(!ROOT_PATH.equals(this), () -> "You MUST not call this with the root path"); return new ResultPath(parent, segment); } @@ -208,10 +212,11 @@ public ResultPath replaceSegment(int segment) { * equals "/a/b/x" * * @param segment the string segment to use + * * @return a new path with the last segment replaced */ public ResultPath replaceSegment(String segment) { - Assert.assertTrue(!ROOT_PATH.equals(this), () -> "You MUST not call this with the root path"); + assertTrue(!ROOT_PATH.equals(this), () -> "You MUST not call this with the root path"); return new ResultPath(parent, segment); } @@ -227,6 +232,7 @@ public boolean isRootPath() { * Appends the provided path to the current one * * @param path the path to append + * * @return a new path */ public ResultPath append(ResultPath path) { @@ -237,12 +243,12 @@ public ResultPath append(ResultPath path) { public ResultPath sibling(String siblingField) { - 
Assert.assertTrue(!ROOT_PATH.equals(this), () -> "You MUST not call this with the root path"); + assertTrue(!ROOT_PATH.equals(this), "You MUST not call this with the root path"); return new ResultPath(this.parent, siblingField); } public ResultPath sibling(int siblingField) { - Assert.assertTrue(!ROOT_PATH.equals(this), () -> "You MUST not call this with the root path"); + assertTrue(!ROOT_PATH.equals(this), "You MUST not call this with the root path"); return new ResultPath(this.parent, siblingField); } diff --git a/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java b/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java index 606de0f8a9..79e201a333 100644 --- a/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java +++ b/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java @@ -3,8 +3,6 @@ import graphql.ExceptionWhileDataFetching; import graphql.PublicApi; import graphql.language.SourceLocation; -import graphql.util.LogKit; -import org.slf4j.Logger; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; @@ -16,8 +14,6 @@ @PublicApi public class SimpleDataFetcherExceptionHandler implements DataFetcherExceptionHandler { - private static final Logger logNotSafe = LogKit.getNotPrivacySafeLogger(SimpleDataFetcherExceptionHandler.class); - static final SimpleDataFetcherExceptionHandler defaultImpl = new SimpleDataFetcherExceptionHandler(); private DataFetcherExceptionHandlerResult handleExceptionImpl(DataFetcherExceptionHandlerParameters handlerParameters) { @@ -43,7 +39,6 @@ public CompletableFuture handleException(Data * @param exception the exception that happened */ protected void logException(ExceptionWhileDataFetching error, Throwable exception) { - logNotSafe.warn(error.getMessage(), exception); } /** diff --git a/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java b/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java 
index be816e7add..7a7cd5c952 100644 --- a/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java +++ b/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java @@ -69,7 +69,7 @@ public CompletableFuture execute(ExecutionContext executionCont }); // dispatched the subscription query - executionStrategyCtx.onDispatched(overallResult); + executionStrategyCtx.onDispatched(); overallResult.whenComplete(executionStrategyCtx::onCompleted); return overallResult; @@ -93,7 +93,7 @@ public CompletableFuture execute(ExecutionContext executionCont private CompletableFuture> createSourceEventStream(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { ExecutionStrategyParameters newParameters = firstFieldOfSubscriptionSelection(parameters); - CompletableFuture fieldFetched = fetchField(executionContext, newParameters); + CompletableFuture fieldFetched = Async.toCompletableFuture(fetchField(executionContext, newParameters)); return fieldFetched.thenApply(fetchedValue -> { Object publisher = fetchedValue.getFetchedValue(); if (publisher != null) { @@ -135,16 +135,17 @@ private CompletableFuture executeSubscriptionEvent(ExecutionCon FetchedValue fetchedValue = unboxPossibleDataFetcherResult(newExecutionContext, parameters, eventPayload); FieldValueInfo fieldValueInfo = completeField(newExecutionContext, newParameters, fetchedValue); CompletableFuture overallResult = fieldValueInfo - .getFieldValue() + .getFieldValueFuture() + .thenApply(val -> new ExecutionResultImpl(val, newExecutionContext.getErrors())) .thenApply(executionResult -> wrapWithRootFieldName(newParameters, executionResult)); // dispatch instrumentation so they can know about each subscription event - subscribedFieldCtx.onDispatched(overallResult); + subscribedFieldCtx.onDispatched(); overallResult.whenComplete(subscribedFieldCtx::onCompleted); // allow them to instrument each ER should they want to InstrumentationExecutionParameters i13nExecutionParameters = new 
InstrumentationExecutionParameters( - executionContext.getExecutionInput(), executionContext.getGraphQLSchema(), executionContext.getInstrumentationState()); + executionContext.getExecutionInput(), executionContext.getGraphQLSchema()); overallResult = overallResult.thenCompose(executionResult -> instrumentation.instrumentExecutionResult(executionResult, i13nExecutionParameters, executionContext.getInstrumentationState())); return overallResult; diff --git a/src/main/java/graphql/execution/TypeResolutionParameters.java b/src/main/java/graphql/execution/TypeResolutionParameters.java index 88a8e2db69..4f20fac4fd 100644 --- a/src/main/java/graphql/execution/TypeResolutionParameters.java +++ b/src/main/java/graphql/execution/TypeResolutionParameters.java @@ -1,6 +1,5 @@ package graphql.execution; -import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.Internal; import graphql.TypeResolutionEnvironment; @@ -74,8 +73,7 @@ public static Builder newParameters() { * * @deprecated use {@link #getGraphQLContext()} instead */ - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") public Object getContext() { return context; } @@ -125,8 +123,7 @@ public Builder schema(GraphQLSchema schema) { return this; } - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") public Builder context(Object context) { this.context = context; return this; diff --git a/src/main/java/graphql/execution/ValuesResolver.java b/src/main/java/graphql/execution/ValuesResolver.java index 27487ca556..94073cbe6d 100644 --- a/src/main/java/graphql/execution/ValuesResolver.java +++ b/src/main/java/graphql/execution/ValuesResolver.java @@ -1,12 +1,10 @@ package graphql.execution; -import graphql.Assert; import graphql.GraphQLContext; import graphql.Internal; import graphql.collect.ImmutableKit; import graphql.execution.values.InputInterceptor; -import graphql.i18n.I18n; import graphql.language.Argument; import graphql.language.ArrayValue; 
import graphql.language.NullValue; @@ -27,7 +25,6 @@ import graphql.schema.GraphQLScalarType; import graphql.schema.GraphQLSchema; import graphql.schema.GraphQLType; -import graphql.schema.GraphQLTypeUtil; import graphql.schema.InputValueWithState; import graphql.schema.visibility.GraphqlFieldVisibility; import org.jetbrains.annotations.NotNull; @@ -201,6 +198,7 @@ public static Map getNormalizedArgumentValues( return result; } + @NotNull public static Map getArgumentValues( GraphQLCodeRegistry codeRegistry, List argumentTypes, @@ -320,6 +318,7 @@ public static T getInputValueImpl( } + @NotNull private static Map getArgumentValuesImpl( InputInterceptor inputInterceptor, GraphqlFieldVisibility fieldVisibility, @@ -376,41 +375,14 @@ private static Map getArgumentValuesImpl( locale); coercedValues.put(argumentName, value); } - // @oneOf input must be checked now that all variables and literals have been converted - GraphQLType unwrappedType = GraphQLTypeUtil.unwrapNonNull(argumentType); - if (unwrappedType instanceof GraphQLInputObjectType) { - GraphQLInputObjectType inputObjectType = (GraphQLInputObjectType) unwrappedType; - if (inputObjectType.isOneOf() && ! 
ValuesResolverConversion.isNullValue(value)) { - validateOneOfInputTypes(inputObjectType, argumentValue, argumentName, value, locale); - } - } - } - } - return coercedValues; - } + ValuesResolverOneOfValidation.validateOneOfInputTypes(argumentType, value, argumentValue, argumentName, locale); - @SuppressWarnings("unchecked") - private static void validateOneOfInputTypes(GraphQLInputObjectType oneOfInputType, Value argumentValue, String argumentName, Object inputValue, Locale locale) { - Assert.assertTrue(inputValue instanceof Map, () -> String.format("The coerced argument %s GraphQLInputObjectType is unexpectedly not a map", argumentName)); - Map objectMap = (Map) inputValue; - int mapSize; - if (argumentValue instanceof ObjectValue) { - mapSize = ((ObjectValue) argumentValue).getObjectFields().size(); - } else { - mapSize = objectMap.size(); - } - if (mapSize != 1) { - String msg = I18n.i18n(I18n.BundleType.Execution, locale) - .msg("Execution.handleOneOfNotOneFieldError", oneOfInputType.getName()); - throw new OneOfTooManyKeysException(msg); - } - String fieldName = objectMap.keySet().iterator().next(); - if (objectMap.get(fieldName) == null) { - String msg = I18n.i18n(I18n.BundleType.Execution, locale) - .msg("Execution.handleOneOfValueIsNullError", oneOfInputType.getName() + "." 
+ fieldName); - throw new OneOfNullValueException(msg); + } } + + + return coercedValues; } private static Map argumentMap(List arguments) { diff --git a/src/main/java/graphql/execution/ValuesResolverLegacy.java b/src/main/java/graphql/execution/ValuesResolverLegacy.java index 81bc80ccdc..d5e58f4656 100644 --- a/src/main/java/graphql/execution/ValuesResolverLegacy.java +++ b/src/main/java/graphql/execution/ValuesResolverLegacy.java @@ -49,7 +49,7 @@ class ValuesResolverLegacy { */ @VisibleForTesting static Value valueToLiteralLegacy(Object value, GraphQLType type, GraphQLContext graphqlContext, Locale locale) { - assertTrue(!(value instanceof Value), () -> "Unexpected literal " + value); + assertTrue(!(value instanceof Value), "Unexpected literal %s", value); if (value == null) { return null; } diff --git a/src/main/java/graphql/execution/ValuesResolverOneOfValidation.java b/src/main/java/graphql/execution/ValuesResolverOneOfValidation.java new file mode 100644 index 0000000000..9955ce050d --- /dev/null +++ b/src/main/java/graphql/execution/ValuesResolverOneOfValidation.java @@ -0,0 +1,110 @@ +package graphql.execution; + +import graphql.Assert; +import graphql.Internal; +import graphql.i18n.I18n; +import graphql.language.ArrayValue; +import graphql.language.ObjectField; +import graphql.language.ObjectValue; +import graphql.language.Value; +import graphql.schema.GraphQLInputObjectField; +import graphql.schema.GraphQLInputObjectType; +import graphql.schema.GraphQLInputType; +import graphql.schema.GraphQLList; +import graphql.schema.GraphQLType; +import graphql.schema.GraphQLTypeUtil; + +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +import static graphql.schema.GraphQLTypeUtil.isList; + +@Internal +final class ValuesResolverOneOfValidation { + + @SuppressWarnings("unchecked") + static void validateOneOfInputTypes(GraphQLType type, Object inputValue, Value argumentValue, String argumentName, Locale 
locale) { + GraphQLType unwrappedNonNullType = GraphQLTypeUtil.unwrapNonNull(type); + + if (isList(unwrappedNonNullType) + && !ValuesResolverConversion.isNullValue(inputValue) + && inputValue instanceof List + && argumentValue instanceof ArrayValue) { + GraphQLType elementType = ((GraphQLList) unwrappedNonNullType).getWrappedType(); + List inputList = (List) inputValue; + List argumentList = ((ArrayValue) argumentValue).getValues(); + + for (int i = 0; i < argumentList.size(); i++) { + validateOneOfInputTypes(elementType, inputList.get(i), argumentList.get(i), argumentName, locale); + } + } + + if (unwrappedNonNullType instanceof GraphQLInputObjectType && !ValuesResolverConversion.isNullValue(inputValue)) { + Assert.assertTrue(inputValue instanceof Map, "The coerced argument %s GraphQLInputObjectType is unexpectedly not a map", argumentName); + Map objectMap = (Map) inputValue; + + GraphQLInputObjectType inputObjectType = (GraphQLInputObjectType) unwrappedNonNullType; + + if (inputObjectType.isOneOf()) { + validateOneOfInputTypesInternal(inputObjectType, argumentValue, objectMap, locale); + } + + for (GraphQLInputObjectField fieldDefinition : inputObjectType.getFields()) { + GraphQLInputType childFieldType = fieldDefinition.getType(); + String childFieldName = fieldDefinition.getName(); + Object childFieldInputValue = objectMap.get(childFieldName); + + if (argumentValue instanceof ObjectValue) { + List values = ((ObjectValue) argumentValue).getObjectFields().stream() + .filter(of -> of.getName().equals(childFieldName)) + .map(ObjectField::getValue) + .collect(Collectors.toList()); + + if (values.size() > 1) { + Assert.assertShouldNeverHappen("argument %s has %s object fields with the same name: '%s'. 
A maximum of 1 is expected", argumentName, values.size(), childFieldName); + } else if (!values.isEmpty()) { + validateOneOfInputTypes(childFieldType, childFieldInputValue, values.get(0), argumentName, locale); + } + } else { + validateOneOfInputTypes(childFieldType, childFieldInputValue, argumentValue, argumentName, locale); + } + } + } + } + + private static void validateOneOfInputTypesInternal(GraphQLInputObjectType oneOfInputType, Value argumentValue, Map objectMap, Locale locale) { + final String fieldName; + if (argumentValue instanceof ObjectValue) { + List objectFields = ((ObjectValue) argumentValue).getObjectFields(); + if (objectFields.size() != 1) { + throwNotOneFieldError(oneOfInputType, locale); + } + + fieldName = objectFields.iterator().next().getName(); + } else { + if (objectMap.size() != 1) { + throwNotOneFieldError(oneOfInputType, locale); + } + + fieldName = objectMap.keySet().iterator().next(); + } + + if (objectMap.get(fieldName) == null) { + throwValueIsNullError(oneOfInputType, locale, fieldName); + } + } + + private static void throwValueIsNullError(GraphQLInputObjectType oneOfInputType, Locale locale, String fieldName) { + String msg = I18n.i18n(I18n.BundleType.Execution, locale) + .msg("Execution.handleOneOfValueIsNullError", oneOfInputType.getName() + "." 
+ fieldName); + throw new OneOfNullValueException(msg); + } + + private static void throwNotOneFieldError(GraphQLInputObjectType oneOfInputType, Locale locale) { + String msg = I18n.i18n(I18n.BundleType.Execution, locale) + .msg("Execution.handleOneOfNotOneFieldError", oneOfInputType.getName()); + throw new OneOfTooManyKeysException(msg); + } +} diff --git a/src/main/java/graphql/execution/conditional/ConditionalNodes.java b/src/main/java/graphql/execution/conditional/ConditionalNodes.java index 9c90deead0..7013c53bf3 100644 --- a/src/main/java/graphql/execution/conditional/ConditionalNodes.java +++ b/src/main/java/graphql/execution/conditional/ConditionalNodes.java @@ -93,7 +93,7 @@ private boolean getDirectiveResult(Map variables, List argumentValues = ValuesResolver.getArgumentValues(SkipDirective.getArguments(), foundDirective.getArguments(), CoercedVariables.of(variables), GraphQLContext.getDefault(), Locale.getDefault()); Object flag = argumentValues.get("if"); - Assert.assertTrue(flag instanceof Boolean, () -> String.format("The '%s' directive MUST have a value for the 'if' argument", directiveName)); + Assert.assertTrue(flag instanceof Boolean, "The '%s' directive MUST have a value for the 'if' argument", directiveName); return (Boolean) flag; } return defaultValue; diff --git a/src/main/java/graphql/execution/directives/QueryDirectives.java b/src/main/java/graphql/execution/directives/QueryDirectives.java index a7fafdad9b..162b6f3c53 100644 --- a/src/main/java/graphql/execution/directives/QueryDirectives.java +++ b/src/main/java/graphql/execution/directives/QueryDirectives.java @@ -1,6 +1,5 @@ package graphql.execution.directives; -import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.PublicApi; import graphql.execution.CoercedVariables; @@ -56,8 +55,7 @@ public interface QueryDirectives { * * @deprecated - use the {@link QueryAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = 
"2022-02-24") Map> getImmediateDirectivesByName(); /** @@ -79,8 +77,7 @@ public interface QueryDirectives { * * @deprecated - use the {@link QueryAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") List getImmediateDirective(String directiveName); /** @@ -91,8 +88,7 @@ public interface QueryDirectives { * * @deprecated - use the {@link QueryAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") Map> getImmediateDirectivesByField(); /** diff --git a/src/main/java/graphql/execution/incremental/DeferredCallContext.java b/src/main/java/graphql/execution/incremental/DeferredCallContext.java new file mode 100644 index 0000000000..15f428966f --- /dev/null +++ b/src/main/java/graphql/execution/incremental/DeferredCallContext.java @@ -0,0 +1,41 @@ +package graphql.execution.incremental; + +import graphql.ExceptionWhileDataFetching; +import graphql.GraphQLError; +import graphql.Internal; +import graphql.execution.ResultPath; +import graphql.language.SourceLocation; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +/** + * Contains data relevant to the execution of a {@link DeferredFragmentCall}. + *

+ * The responsibilities of this class are similar to {@link graphql.execution.ExecutionContext}, but restricted to the + * execution of a deferred call (instead of the whole GraphQL execution like {@link graphql.execution.ExecutionContext}). + *

+ * Some behaviours, like error capturing, need to be scoped to a single {@link DeferredFragmentCall}, because each defer payload + * contains its own distinct list of errors. + */ +@Internal +public class DeferredCallContext { + + private final List errors = new CopyOnWriteArrayList<>(); + + public void onFetchingException(ResultPath path, SourceLocation sourceLocation, Throwable throwable) { + ExceptionWhileDataFetching error = new ExceptionWhileDataFetching(path, throwable, sourceLocation); + onError(error); + } + + public void onError(GraphQLError graphqlError) { + errors.add(graphqlError); + } + + /** + * @return a list of errors that were encountered while executing this deferred call + */ + public List getErrors() { + return errors; + } +} diff --git a/src/main/java/graphql/execution/incremental/DeferredExecution.java b/src/main/java/graphql/execution/incremental/DeferredExecution.java new file mode 100644 index 0000000000..3f14f5922e --- /dev/null +++ b/src/main/java/graphql/execution/incremental/DeferredExecution.java @@ -0,0 +1,26 @@ +package graphql.execution.incremental; + +import graphql.ExperimentalApi; +import graphql.normalized.incremental.NormalizedDeferredExecution; + +import javax.annotation.Nullable; + +/** + * Represents details about the defer execution that can be associated with a {@link graphql.execution.MergedField}. + *

+ * This representation is used during graphql execution. Check {@link NormalizedDeferredExecution} + * for the normalized representation of @defer. + */ +@ExperimentalApi +public class DeferredExecution { + private final String label; + + public DeferredExecution(String label) { + this.label = label; + } + + @Nullable + public String getLabel() { + return label; + } +} diff --git a/src/main/java/graphql/execution/incremental/DeferredExecutionSupport.java b/src/main/java/graphql/execution/incremental/DeferredExecutionSupport.java new file mode 100644 index 0000000000..034138d110 --- /dev/null +++ b/src/main/java/graphql/execution/incremental/DeferredExecutionSupport.java @@ -0,0 +1,206 @@ +package graphql.execution.incremental; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import com.google.common.collect.ImmutableSet; +import graphql.ExecutionResult; +import graphql.ExecutionResultImpl; +import graphql.Internal; +import graphql.execution.ExecutionContext; +import graphql.execution.ExecutionStrategyParameters; +import graphql.execution.FieldValueInfo; +import graphql.execution.MergedField; +import graphql.execution.MergedSelectionSet; +import graphql.execution.instrumentation.Instrumentation; +import graphql.incremental.IncrementalPayload; +import graphql.util.FpKit; + +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.function.BiFunction; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +/** + * The purpose of this class hierarchy is to encapsulate most of the logic for deferring field execution, thus + * keeping the main execution strategy code clean and focused on the main execution logic. + *

+ * The {@link NoOp} instance should be used when incremental support is not enabled for the current execution. The + * methods in this class will return empty or no-op results, that should not impact the main execution. + *

+ * {@link DeferredExecutionSupportImpl} is the actual implementation that will be used when incremental support is enabled. + */ +@Internal +public interface DeferredExecutionSupport { + + boolean isDeferredField(MergedField mergedField); + + int deferredFieldsCount(); + + List getNonDeferredFieldNames(List allFieldNames); + + Set> createCalls(); + + DeferredExecutionSupport NOOP = new DeferredExecutionSupport.NoOp(); + + /** + * An implementation that actually executes the deferred fields. + */ + class DeferredExecutionSupportImpl implements DeferredExecutionSupport { + private final ImmutableListMultimap deferredExecutionToFields; + private final ImmutableSet deferredFields; + private final ImmutableList nonDeferredFieldNames; + private final ExecutionStrategyParameters parameters; + private final ExecutionContext executionContext; + private final BiFunction> resolveFieldWithInfoFn; + private final Map>> dfCache = new HashMap<>(); + + public DeferredExecutionSupportImpl( + MergedSelectionSet mergedSelectionSet, + ExecutionStrategyParameters parameters, + ExecutionContext executionContext, + BiFunction> resolveFieldWithInfoFn + ) { + this.executionContext = executionContext; + this.resolveFieldWithInfoFn = resolveFieldWithInfoFn; + ImmutableListMultimap.Builder deferredExecutionToFieldsBuilder = ImmutableListMultimap.builder(); + ImmutableSet.Builder deferredFieldsBuilder = ImmutableSet.builder(); + ImmutableList.Builder nonDeferredFieldNamesBuilder = ImmutableList.builder(); + + mergedSelectionSet.getSubFields().values().forEach(mergedField -> { + mergedField.getDeferredExecutions().forEach(de -> { + deferredExecutionToFieldsBuilder.put(de, mergedField); + deferredFieldsBuilder.add(mergedField); + }); + + if (mergedField.getDeferredExecutions().isEmpty()) { + nonDeferredFieldNamesBuilder.add(mergedField.getSingleField().getResultKey()); + } + }); + + this.deferredExecutionToFields = deferredExecutionToFieldsBuilder.build(); + this.deferredFields = 
deferredFieldsBuilder.build(); + this.parameters = parameters; + this.nonDeferredFieldNames = nonDeferredFieldNamesBuilder.build(); + } + + @Override + public boolean isDeferredField(MergedField mergedField) { + return deferredFields.contains(mergedField); + } + + @Override + public int deferredFieldsCount() { + return deferredFields.size(); + } + + @Override + public List getNonDeferredFieldNames(List allFieldNames) { + return this.nonDeferredFieldNames; + } + + @Override + public Set> createCalls() { + return deferredExecutionToFields.keySet().stream() + .map(this::createDeferredFragmentCall) + .collect(Collectors.toSet()); + } + + private DeferredFragmentCall createDeferredFragmentCall(DeferredExecution deferredExecution) { + DeferredCallContext deferredCallContext = new DeferredCallContext(); + + List mergedFields = deferredExecutionToFields.get(deferredExecution); + + List>> calls = mergedFields.stream() + .map(currentField -> this.createResultSupplier(currentField, deferredCallContext)) + .collect(Collectors.toList()); + + return new DeferredFragmentCall( + deferredExecution.getLabel(), + this.parameters.getPath(), + calls, + deferredCallContext + ); + } + + private Supplier> createResultSupplier( + MergedField currentField, + DeferredCallContext deferredCallContext + ) { + Map fields = new LinkedHashMap<>(); + fields.put(currentField.getResultKey(), currentField); + + ExecutionStrategyParameters callParameters = parameters.transform(builder -> + { + MergedSelectionSet mergedSelectionSet = MergedSelectionSet.newMergedSelectionSet().subFields(fields).build(); + builder.deferredCallContext(deferredCallContext) + .field(currentField) + .fields(mergedSelectionSet) + .path(parameters.getPath().segment(currentField.getResultKey())) + .parent(null); // this is a break in the parent -> child chain - it's a new start effectively + } + ); + + + Instrumentation instrumentation = executionContext.getInstrumentation(); + + 
executionContext.getDataLoaderDispatcherStrategy().deferredField(executionContext, currentField); + instrumentation.beginDeferredField(executionContext.getInstrumentationState()); + + return dfCache.computeIfAbsent( + currentField.getResultKey(), + // The same field can be associated with multiple defer executions, so + // we memoize the field resolution to avoid multiple calls to the same data fetcher + key -> FpKit.interThreadMemoize(() -> { + CompletableFuture fieldValueResult = resolveFieldWithInfoFn + .apply(executionContext, callParameters); + + // Create a reference to the CompletableFuture that resolves an ExecutionResult + // so we can pass it to the Instrumentation "onDispatched" callback. + CompletableFuture executionResultCF = fieldValueResult + .thenCompose(fvi -> fvi + .getFieldValueFuture() + .thenApply(fv -> ExecutionResultImpl.newExecutionResult().data(fv).build()) + ); + + return executionResultCF + .thenApply(executionResult -> + new DeferredFragmentCall.FieldWithExecutionResult(currentField.getResultKey(), executionResult) + ); + } + ) + ); + } + } + + /** + * A no-op implementation that should be used when incremental support is not enabled for the current execution. 
+ */ + class NoOp implements DeferredExecutionSupport { + + @Override + public boolean isDeferredField(MergedField mergedField) { + return false; + } + + @Override + public int deferredFieldsCount() { + return 0; + } + + @Override + public List getNonDeferredFieldNames(List allFieldNames) { + return allFieldNames; + } + + @Override + public Set> createCalls() { + return Collections.emptySet(); + } + } +} diff --git a/src/main/java/graphql/execution/incremental/DeferredFragmentCall.java b/src/main/java/graphql/execution/incremental/DeferredFragmentCall.java new file mode 100644 index 0000000000..79b9496fd1 --- /dev/null +++ b/src/main/java/graphql/execution/incremental/DeferredFragmentCall.java @@ -0,0 +1,135 @@ +package graphql.execution.incremental; + +import com.google.common.collect.ImmutableList; +import graphql.ExecutionResult; +import graphql.GraphQLError; +import graphql.Internal; +import graphql.execution.Async; +import graphql.execution.NonNullableFieldWasNullError; +import graphql.execution.NonNullableFieldWasNullException; +import graphql.execution.ResultPath; +import graphql.incremental.DeferPayload; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.function.Supplier; + +/** + * Represents a deferred call (aka @defer) to get an execution result sometime after the initial query has returned. + *

+ * A deferred call can encompass multiple fields. The deferred call will resolve once all sub-fields resolve. + *

+ * For example, this query: + *

+ * {
+ *     post {
+ *         ... @defer(label: "defer-post") {
+ *             text
+ *             summary
+ *         }
+ *     }
+ * }
+ * 
+ * Will result on 1 instance of `DeferredCall`, containing calls for the 2 fields: "text" and "summary". + */ +@Internal +public class DeferredFragmentCall implements IncrementalCall { + private final String label; + + public ResultPath getPath() { + return path; + } + + private final ResultPath path; + private final List>> calls; + private final DeferredCallContext deferredCallContext; + + public DeferredFragmentCall( + String label, + ResultPath path, + List>> calls, + DeferredCallContext deferredCallContext + ) { + this.label = label; + this.path = path; + this.calls = calls; + this.deferredCallContext = deferredCallContext; + } + + @Override + public CompletableFuture invoke() { + Async.CombinedBuilder futures = Async.ofExpectedSize(calls.size()); + + calls.forEach(call -> { + CompletableFuture cf = call.get(); + futures.add(cf); + }); + + return futures.await() + .thenApply(this::transformToDeferredPayload) + .handle(this::handleNonNullableFieldError); + } + + /** + * Non-nullable errors need special treatment. + * When they happen, all the sibling fields will be ignored in the result. So as soon as one of the field calls + * throw this error, we can ignore the {@link ExecutionResult} from all the fields associated with this {@link DeferredFragmentCall} + * and build a special {@link DeferPayload} that captures the details of the error. 
+ */ + private DeferPayload handleNonNullableFieldError(DeferPayload result, Throwable throwable) { + if (throwable != null) { + Throwable cause = throwable.getCause(); + if (cause instanceof NonNullableFieldWasNullException) { + GraphQLError error = new NonNullableFieldWasNullError((NonNullableFieldWasNullException) cause); + return DeferPayload.newDeferredItem() + .errors(Collections.singletonList(error)) + .label(label) + .path(path) + .build(); + } + if (cause instanceof CompletionException) { + throw (CompletionException) cause; + } + throw new CompletionException(cause); + } + return result; + } + + private DeferPayload transformToDeferredPayload(List fieldWithExecutionResults) { + List errorsEncountered = deferredCallContext.getErrors(); + + Map dataMap = new HashMap<>(); + + ImmutableList.Builder errorsBuilder = ImmutableList.builder(); + + fieldWithExecutionResults.forEach(entry -> { + dataMap.put(entry.resultKey, entry.executionResult.getData()); + errorsBuilder.addAll(entry.executionResult.getErrors()); + }); + + return DeferPayload.newDeferredItem() + .errors(errorsEncountered) + .path(path) + .label(label) + .data(dataMap) + .build(); + } + + public static class FieldWithExecutionResult { + private final String resultKey; + private final ExecutionResult executionResult; + + public FieldWithExecutionResult(String resultKey, ExecutionResult executionResult) { + this.resultKey = resultKey; + this.executionResult = executionResult; + } + + public ExecutionResult getExecutionResult() { + return executionResult; + } + } +} diff --git a/src/main/java/graphql/execution/incremental/IncrementalCall.java b/src/main/java/graphql/execution/incremental/IncrementalCall.java new file mode 100644 index 0000000000..7d36d48f69 --- /dev/null +++ b/src/main/java/graphql/execution/incremental/IncrementalCall.java @@ -0,0 +1,14 @@ +package graphql.execution.incremental; + +import graphql.incremental.IncrementalPayload; + +import java.util.concurrent.CompletableFuture; + +/** 
+ * Represents an incremental call (resulted from the usage of @defer or @stream). + * + * @param the type of the payload that this call resolves. + */ +public interface IncrementalCall { + CompletableFuture invoke(); +} diff --git a/src/main/java/graphql/execution/incremental/IncrementalCallState.java b/src/main/java/graphql/execution/incremental/IncrementalCallState.java new file mode 100644 index 0000000000..f96a706f36 --- /dev/null +++ b/src/main/java/graphql/execution/incremental/IncrementalCallState.java @@ -0,0 +1,97 @@ +package graphql.execution.incremental; + +import graphql.Internal; +import graphql.execution.reactive.SingleSubscriberPublisher; +import graphql.incremental.DelayedIncrementalPartialResult; +import graphql.incremental.IncrementalPayload; +import graphql.util.LockKit; +import org.reactivestreams.Publisher; + +import java.util.Collection; +import java.util.Collections; +import java.util.Deque; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import static graphql.incremental.DelayedIncrementalPartialResultImpl.newIncrementalExecutionResult; + +/** + * This provides support for @defer directives on fields that mean that results will be sent AFTER + * the main result is sent via a Publisher stream. 
+ */ +@Internal +public class IncrementalCallState { + private final AtomicBoolean incrementalCallsDetected = new AtomicBoolean(false); + private final Deque> incrementalCalls = new ConcurrentLinkedDeque<>(); + private final SingleSubscriberPublisher publisher = new SingleSubscriberPublisher<>(); + private final AtomicInteger pendingCalls = new AtomicInteger(); + private final LockKit.ReentrantLock publisherLock = new LockKit.ReentrantLock(); + + @SuppressWarnings("FutureReturnValueIgnored") + private void drainIncrementalCalls() { + IncrementalCall incrementalCall = incrementalCalls.poll(); + + while (incrementalCall != null) { + incrementalCall.invoke() + .whenComplete((payload, exception) -> { + if (exception != null) { + publisher.offerError(exception); + return; + } + + // The assigment of `remainingCalls` and `publisher.offer` need to be synchronized to ensure + // `hasNext` is `false` precisely on the last event offered to the publisher. + publisherLock.lock(); + final int remainingCalls; + + try { + remainingCalls = pendingCalls.decrementAndGet(); + + DelayedIncrementalPartialResult executionResult = newIncrementalExecutionResult() + .incrementalItems(Collections.singletonList(payload)) + .hasNext(remainingCalls != 0) + .build(); + + publisher.offer(executionResult); + } finally { + publisherLock.unlock(); + } + + if (remainingCalls == 0) { + publisher.noMoreData(); + } else { + // Nested calls were added, let's try to drain the queue again. 
+ drainIncrementalCalls(); + } + }); + incrementalCall = incrementalCalls.poll(); + } + } + + public void enqueue(IncrementalCall incrementalCall) { + publisherLock.runLocked(() -> { + incrementalCallsDetected.set(true); + incrementalCalls.offer(incrementalCall); + pendingCalls.incrementAndGet(); + }); + } + + public void enqueue(Collection> calls) { + calls.forEach(this::enqueue); + } + + public boolean getIncrementalCallsDetected() { + return incrementalCallsDetected.get(); + } + + /** + * When this is called the deferred execution will begin + * + * @return the publisher of deferred results + */ + public Publisher startDeferredCalls() { + drainIncrementalCalls(); + return publisher; + } +} diff --git a/src/main/java/graphql/execution/incremental/IncrementalUtils.java b/src/main/java/graphql/execution/incremental/IncrementalUtils.java new file mode 100644 index 0000000000..2a89ade3fd --- /dev/null +++ b/src/main/java/graphql/execution/incremental/IncrementalUtils.java @@ -0,0 +1,53 @@ +package graphql.execution.incremental; + +import graphql.Assert; +import graphql.GraphQLContext; +import graphql.Internal; +import graphql.execution.CoercedVariables; +import graphql.execution.ValuesResolver; +import graphql.language.Directive; +import graphql.language.NodeUtil; + +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.function.Function; + +import static graphql.Directives.DeferDirective; + +@Internal +public class IncrementalUtils { + private IncrementalUtils() { + } + + public static T createDeferredExecution( + Map variables, + List directives, + Function builderFunction + ) { + Directive deferDirective = NodeUtil.findNodeByName(directives, DeferDirective.getName()); + + if (deferDirective != null) { + Map argumentValues = ValuesResolver.getArgumentValues(DeferDirective.getArguments(), deferDirective.getArguments(), CoercedVariables.of(variables), GraphQLContext.getDefault(), Locale.getDefault()); + + Object flag = 
argumentValues.get("if"); + Assert.assertTrue(flag instanceof Boolean, "The '%s' directive MUST have a value for the 'if' argument", DeferDirective.getName()); + + if (!((Boolean) flag)) { + return null; + } + + Object label = argumentValues.get("label"); + + if (label == null) { + return builderFunction.apply(null); + } + + Assert.assertTrue(label instanceof String, "The 'label' argument from the '%s' directive MUST contain a String value", DeferDirective.getName()); + + return builderFunction.apply((String) label); + } + + return null; + } +} diff --git a/src/main/java/graphql/execution/incremental/StreamedCall.java b/src/main/java/graphql/execution/incremental/StreamedCall.java new file mode 100644 index 0000000000..beae535b3e --- /dev/null +++ b/src/main/java/graphql/execution/incremental/StreamedCall.java @@ -0,0 +1,19 @@ +package graphql.execution.incremental; + +import graphql.Internal; +import graphql.incremental.StreamPayload; + +import java.util.concurrent.CompletableFuture; + +/** + * Represents a call that fetches data that was streamed, via the @stream directive. + *

+ * This is a placeholder class, created to showcase the proposed structure that accommodates both @defer and @stream execution. + */ +@Internal +public class StreamedCall implements IncrementalCall { + @Override + public CompletableFuture invoke() { + throw new UnsupportedOperationException("Not implemented yet."); + } +} diff --git a/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java b/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java index 70e7bd063f..6decb929cd 100644 --- a/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java @@ -1,10 +1,10 @@ package graphql.execution.instrumentation; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import graphql.Assert; import graphql.ExecutionInput; import graphql.ExecutionResult; +import graphql.ExperimentalApi; import graphql.PublicApi; import graphql.execution.Async; import graphql.execution.ExecutionContext; @@ -24,14 +24,16 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import java.util.AbstractMap; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; -import java.util.function.Function; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; import static graphql.Assert.assertNotNull; -import static graphql.collect.ImmutableKit.mapAndDropNulls; /** * This allows you to chain together a number of {@link graphql.execution.instrumentation.Instrumentation} implementations @@ -41,7 +43,6 @@ * * @see graphql.execution.instrumentation.Instrumentation */ -@SuppressWarnings("deprecation") @PublicApi public class ChainedInstrumentation implements Instrumentation { @@ -64,31 +65,50 @@ public List getInstrumentations() { return instrumentations; } - protected InstrumentationState 
getSpecificState(Instrumentation instrumentation, InstrumentationState parametersInstrumentationState) { - ChainedInstrumentationState chainedInstrumentationState = (ChainedInstrumentationState) parametersInstrumentationState; - return chainedInstrumentationState.getState(instrumentation); - } - - private InstrumentationContext chainedCtx(Function> mapper) { + private InstrumentationContext chainedCtx(InstrumentationState state, BiFunction> mapper) { // if we have zero or 1 instrumentations (and 1 is the most common), then we can avoid an object allocation // of the ChainedInstrumentationContext since it won't be needed if (instrumentations.isEmpty()) { return SimpleInstrumentationContext.noOp(); } + ChainedInstrumentationState chainedInstrumentationState = (ChainedInstrumentationState) state; if (instrumentations.size() == 1) { - return mapper.apply(instrumentations.get(0)); + return mapper.apply(instrumentations.get(0), chainedInstrumentationState.getState(0)); } - return new ChainedInstrumentationContext<>(mapAndDropNulls(instrumentations, mapper)); + return new ChainedInstrumentationContext<>(chainedMapAndDropNulls(chainedInstrumentationState, mapper)); } - @Override - public InstrumentationState createState() { - return Assert.assertShouldNeverHappen("createStateAsync should only ever be used"); + private T chainedInstrument(InstrumentationState state, T input, ChainedInstrumentationFunction mapper) { + ChainedInstrumentationState chainedInstrumentationState = (ChainedInstrumentationState) state; + for (int i = 0; i < instrumentations.size(); i++) { + Instrumentation instrumentation = instrumentations.get(i); + InstrumentationState specificState = chainedInstrumentationState.getState(i); + input = mapper.apply(instrumentation, specificState, input); + } + return input; + } + + protected ImmutableList chainedMapAndDropNulls(InstrumentationState state, BiFunction mapper) { + ChainedInstrumentationState chainedInstrumentationState = (ChainedInstrumentationState) 
state; + ImmutableList.Builder result = ImmutableList.builderWithExpectedSize(instrumentations.size()); + for (int i = 0; i < instrumentations.size(); i++) { + Instrumentation instrumentation = instrumentations.get(i); + InstrumentationState specificState = chainedInstrumentationState.getState(i); + T value = mapper.apply(instrumentation, specificState); + if (value != null) { + result.add(value); + } + } + return result.build(); } - @Override - public @Nullable InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return Assert.assertShouldNeverHappen("createStateAsync should only ever be used"); + protected void chainedConsume(InstrumentationState state, BiConsumer stateConsumer) { + ChainedInstrumentationState chainedInstrumentationState = (ChainedInstrumentationState) state; + for (int i = 0; i < instrumentations.size(); i++) { + Instrumentation instrumentation = instrumentations.get(i); + InstrumentationState specificState = chainedInstrumentationState.getState(i); + stateConsumer.accept(instrumentation, specificState); + } } @Override @@ -96,283 +116,144 @@ public InstrumentationState createState() { return ChainedInstrumentationState.combineAll(instrumentations, parameters); } - @Override - @NotNull - public InstrumentationContext beginExecution(InstrumentationExecutionParameters parameters) { - // these assert methods have been left in so that we truly never call these methods, either in production nor in tests - // later when the deprecated methods are removed, this will disappear. 
- return Assert.assertShouldNeverHappen("The deprecated " + "beginExecution" + " was called"); - } - @Override public InstrumentationContext beginExecution(InstrumentationExecutionParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginExecution(parameters, specificState); - }); + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginExecution(parameters, specificState)); } - @Override - @NotNull - public InstrumentationContext beginParse(InstrumentationExecutionParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginParse" + " was called"); - } @Override public InstrumentationContext beginParse(InstrumentationExecutionParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginParse(parameters, specificState); - }); + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginParse(parameters, specificState)); } - @Override - @NotNull - public InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginValidation" + " was called"); - } @Override public InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginValidation(parameters, specificState); - }); - } - - @Override - @NotNull - public InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginExecuteOperation" + " was called"); 
+ return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginValidation(parameters, specificState)); } @Override public InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginExecuteOperation(parameters, specificState); - }); + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginExecuteOperation(parameters, specificState)); } - @Override - @NotNull - public ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginExecutionStrategy" + " was called"); - } @Override public ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { if (instrumentations.isEmpty()) { return ExecutionStrategyInstrumentationContext.NOOP; } - Function mapper = instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginExecutionStrategy(parameters, specificState); - }; + BiFunction mapper = (instrumentation, specificState) -> instrumentation.beginExecutionStrategy(parameters, specificState); + ChainedInstrumentationState chainedInstrumentationState = (ChainedInstrumentationState) state; if (instrumentations.size() == 1) { - return mapper.apply(instrumentations.get(0)); + return mapper.apply(instrumentations.get(0), chainedInstrumentationState.getState(0)); } - return new ChainedExecutionStrategyInstrumentationContext(mapAndDropNulls(instrumentations, mapper)); + return new ChainedExecutionStrategyInstrumentationContext(chainedMapAndDropNulls(chainedInstrumentationState, mapper)); } @Override - @NotNull - public 
InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginSubscribedFieldEvent" + " was called"); + public @Nullable ExecuteObjectInstrumentationContext beginExecuteObject(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + if (instrumentations.isEmpty()) { + return ExecuteObjectInstrumentationContext.NOOP; + } + BiFunction mapper = (instrumentation, specificState) -> instrumentation.beginExecuteObject(parameters, specificState); + ChainedInstrumentationState chainedInstrumentationState = (ChainedInstrumentationState) state; + if (instrumentations.size() == 1) { + return mapper.apply(instrumentations.get(0), chainedInstrumentationState.getState(0)); + } + return new ChainedExecuteObjectInstrumentationContext(chainedMapAndDropNulls(chainedInstrumentationState, mapper)); } + @ExperimentalApi @Override - public InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginSubscribedFieldEvent(parameters, specificState); - }); + public InstrumentationContext beginDeferredField(InstrumentationState instrumentationState) { + return new ChainedDeferredExecutionStrategyInstrumentationContext(chainedMapAndDropNulls(instrumentationState, Instrumentation::beginDeferredField)); } - @Override - @NotNull - public InstrumentationContext beginField(InstrumentationFieldParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginField" + " was called"); - } @Override - public InstrumentationContext beginField(InstrumentationFieldParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return 
instrumentation.beginField(parameters, specificState); - }); + public InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters, InstrumentationState state) { + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginSubscribedFieldEvent(parameters, specificState)); } @Override - @NotNull - public InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginFieldFetch" + " was called"); + public @Nullable InstrumentationContext beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginFieldExecution(parameters, specificState)); } @Override public InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginFieldFetch(parameters, specificState); - }); - } - - - @Override - @NotNull - public InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "beginFieldComplete" + " was called"); - } - - @Override - public InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginFieldComplete(parameters, specificState); - }); + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginFieldFetch(parameters, specificState)); } @Override - @NotNull - public InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters) { - return 
Assert.assertShouldNeverHappen("The deprecated " + "beginFieldListComplete" + " was called"); + public @Nullable InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginFieldCompletion(parameters, specificState)); } - @Override - public InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { - return chainedCtx(instrumentation -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - return instrumentation.beginFieldListComplete(parameters, specificState); - }); - } @Override - @NotNull - public ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "instrumentExecutionInput" + " was called"); + public @Nullable InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + return chainedCtx(state, (instrumentation, specificState) -> instrumentation.beginFieldListCompletion(parameters, specificState)); } @NotNull @Override public ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters, InstrumentationState state) { - if (instrumentations.isEmpty()) { - return executionInput; - } - for (Instrumentation instrumentation : instrumentations) { - InstrumentationState specificState = getSpecificState(instrumentation, state); - executionInput = instrumentation.instrumentExecutionInput(executionInput, parameters, specificState); - } - return executionInput; - } - - @Override - @NotNull - public DocumentAndVariables instrumentDocumentAndVariables(DocumentAndVariables documentAndVariables, InstrumentationExecutionParameters parameters) { - return 
Assert.assertShouldNeverHappen("The deprecated " + "instrumentDocumentAndVariables" + " was called"); + return chainedInstrument(state, executionInput, (instrumentation, specificState, accumulator) -> instrumentation.instrumentExecutionInput(accumulator, parameters, specificState)); } @NotNull @Override public DocumentAndVariables instrumentDocumentAndVariables(DocumentAndVariables documentAndVariables, InstrumentationExecutionParameters parameters, InstrumentationState state) { - if (instrumentations.isEmpty()) { - return documentAndVariables; - } - for (Instrumentation instrumentation : instrumentations) { - InstrumentationState specificState = getSpecificState(instrumentation, state); - documentAndVariables = instrumentation.instrumentDocumentAndVariables(documentAndVariables, parameters, specificState); - } - return documentAndVariables; - } - - @Override - @NotNull - public GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "instrumentSchema" + " was called"); + return chainedInstrument(state, documentAndVariables, (instrumentation, specificState, accumulator) -> + instrumentation.instrumentDocumentAndVariables(accumulator, parameters, specificState)); } @NotNull @Override public GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters parameters, InstrumentationState state) { - if (instrumentations.isEmpty()) { - return schema; - } - for (Instrumentation instrumentation : instrumentations) { - InstrumentationState specificState = getSpecificState(instrumentation, state); - schema = instrumentation.instrumentSchema(schema, parameters, specificState); - } - return schema; - } - - @Override - @NotNull - public ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "instrumentExecutionContext" 
+ " was called"); + return chainedInstrument(state, schema, (instrumentation, specificState, accumulator) -> + instrumentation.instrumentSchema(accumulator, parameters, specificState)); } @NotNull @Override public ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters, InstrumentationState state) { - if (instrumentations.isEmpty()) { - return executionContext; - } - for (Instrumentation instrumentation : instrumentations) { - InstrumentationState specificState = getSpecificState(instrumentation, state); - executionContext = instrumentation.instrumentExecutionContext(executionContext, parameters, specificState); - } - return executionContext; - } - - @Override - @NotNull - public DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "instrumentDataFetcher" + " was called"); + return chainedInstrument(state, executionContext, (instrumentation, specificState, accumulator) -> + instrumentation.instrumentExecutionContext(accumulator, parameters, specificState)); } @NotNull @Override public DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) { - if (instrumentations.isEmpty()) { - return dataFetcher; - } - for (Instrumentation instrumentation : instrumentations) { - InstrumentationState specificState = getSpecificState(instrumentation, state); - dataFetcher = instrumentation.instrumentDataFetcher(dataFetcher, parameters, specificState); - } - return dataFetcher; - } - - @Override - @NotNull - public CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) { - return Assert.assertShouldNeverHappen("The deprecated " + "instrumentExecutionResult" + " was called"); + return chainedInstrument(state, dataFetcher, (Instrumentation instrumentation, 
InstrumentationState specificState, DataFetcher accumulator) -> + instrumentation.instrumentDataFetcher(accumulator, parameters, specificState)); } @NotNull @Override public CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { - CompletableFuture> resultsFuture = Async.eachSequentially(instrumentations, (instrumentation, prevResults) -> { - InstrumentationState specificState = getSpecificState(instrumentation, state); - ExecutionResult lastResult = prevResults.size() > 0 ? prevResults.get(prevResults.size() - 1) : executionResult; + ImmutableList> entries = chainedMapAndDropNulls(state, AbstractMap.SimpleEntry::new); + CompletableFuture> resultsFuture = Async.eachSequentially(entries, (entry, prevResults) -> { + Instrumentation instrumentation = entry.getKey(); + InstrumentationState specificState = entry.getValue(); + ExecutionResult lastResult = !prevResults.isEmpty() ? prevResults.get(prevResults.size() - 1) : executionResult; return instrumentation.instrumentExecutionResult(lastResult, parameters, specificState); }); return resultsFuture.thenApply((results) -> results.isEmpty() ? 
executionResult : results.get(results.size() - 1)); } static class ChainedInstrumentationState implements InstrumentationState { - private final Map instrumentationToStates; - + private final List instrumentationStates; - private ChainedInstrumentationState(List instrumentations, List instrumentationStates) { - instrumentationToStates = Maps.newLinkedHashMapWithExpectedSize(instrumentations.size()); - for (int i = 0; i < instrumentations.size(); i++) { - Instrumentation instrumentation = instrumentations.get(i); - InstrumentationState instrumentationState = instrumentationStates.get(i); - instrumentationToStates.put(instrumentation, instrumentationState); - } + private ChainedInstrumentationState(List instrumentationStates) { + this.instrumentationStates = instrumentationStates; } - private InstrumentationState getState(Instrumentation instrumentation) { - return instrumentationToStates.get(instrumentation); + private InstrumentationState getState(int index) { + return instrumentationStates.get(index); } private static CompletableFuture combineAll(List instrumentations, InstrumentationCreateStateParameters parameters) { @@ -382,7 +263,7 @@ private static CompletableFuture combineAll(List stateCF = Async.orNullCompletedFuture(instrumentation.createStateAsync(parameters)); builder.add(stateCF); } - return builder.await().thenApply(instrumentationStates -> new ChainedInstrumentationState(instrumentations, instrumentationStates)); + return builder.await().thenApply(ChainedInstrumentationState::new); } } @@ -395,8 +276,8 @@ private static class ChainedInstrumentationContext implements Instrumentation } @Override - public void onDispatched(CompletableFuture result) { - contexts.forEach(context -> context.onDispatched(result)); + public void onDispatched() { + contexts.forEach(InstrumentationContext::onDispatched); } @Override @@ -414,8 +295,8 @@ private static class ChainedExecutionStrategyInstrumentationContext implements E } @Override - public void 
onDispatched(CompletableFuture result) { - contexts.forEach(context -> context.onDispatched(result)); + public void onDispatched() { + contexts.forEach(InstrumentationContext::onDispatched); } @Override @@ -434,5 +315,59 @@ public void onFieldValuesException() { } } + private static class ChainedExecuteObjectInstrumentationContext implements ExecuteObjectInstrumentationContext { + + private final ImmutableList contexts; + + ChainedExecuteObjectInstrumentationContext(ImmutableList contexts) { + this.contexts = contexts; + } + + @Override + public void onDispatched() { + contexts.forEach(InstrumentationContext::onDispatched); + } + + @Override + public void onCompleted(Map result, Throwable t) { + contexts.forEach(context -> context.onCompleted(result, t)); + } + + @Override + public void onFieldValuesInfo(List fieldValueInfoList) { + contexts.forEach(context -> context.onFieldValuesInfo(fieldValueInfoList)); + } + + @Override + public void onFieldValuesException() { + contexts.forEach(ExecuteObjectInstrumentationContext::onFieldValuesException); + } + } + + private static class ChainedDeferredExecutionStrategyInstrumentationContext implements InstrumentationContext { + + private final List> contexts; + + ChainedDeferredExecutionStrategyInstrumentationContext(List> contexts) { + this.contexts = Collections.unmodifiableList(contexts); + } + + @Override + public void onDispatched() { + contexts.forEach(InstrumentationContext::onDispatched); + } + + @Override + public void onCompleted(Object result, Throwable t) { + contexts.forEach(context -> context.onCompleted(result, t)); + } + } + + @FunctionalInterface + private interface ChainedInstrumentationFunction { + R apply(I instrumentation, S state, V value); + } + + } diff --git a/src/main/java/graphql/execution/instrumentation/ExecuteObjectInstrumentationContext.java b/src/main/java/graphql/execution/instrumentation/ExecuteObjectInstrumentationContext.java new file mode 100644 index 0000000000..4e100238df --- /dev/null 
+++ b/src/main/java/graphql/execution/instrumentation/ExecuteObjectInstrumentationContext.java @@ -0,0 +1,45 @@ +package graphql.execution.instrumentation; + +import graphql.Internal; +import graphql.PublicSpi; +import graphql.execution.FieldValueInfo; +import org.jetbrains.annotations.NotNull; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +@PublicSpi +public interface ExecuteObjectInstrumentationContext extends InstrumentationContext> { + + @Internal + ExecuteObjectInstrumentationContext NOOP = new ExecuteObjectInstrumentationContext() { + @Override + public void onDispatched() { + } + + @Override + public void onCompleted(Map result, Throwable t) { + } + }; + + /** + * This creates a no-op {@link InstrumentationContext} if the one pass in is null + * + * @param nullableContext a {@link InstrumentationContext} that can be null + * + * @return a non null {@link InstrumentationContext} that maybe a no-op + */ + @NotNull + @Internal + static ExecuteObjectInstrumentationContext nonNullCtx(ExecuteObjectInstrumentationContext nullableContext) { + return nullableContext == null ? 
NOOP : nullableContext; + } + + default void onFieldValuesInfo(List fieldValueInfoList) { + } + + default void onFieldValuesException() { + } + +} diff --git a/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java b/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java index 04fbceab81..7fc0a3e0d3 100644 --- a/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java +++ b/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java @@ -36,7 +36,7 @@ static ExecutionStrategyInstrumentationContext nonNullCtx(ExecutionStrategyInstr @Internal ExecutionStrategyInstrumentationContext NOOP = new ExecutionStrategyInstrumentationContext() { @Override - public void onDispatched(CompletableFuture result) { + public void onDispatched() { } @Override diff --git a/src/main/java/graphql/execution/instrumentation/Instrumentation.java b/src/main/java/graphql/execution/instrumentation/Instrumentation.java index 77c4c6bd83..977422e565 100644 --- a/src/main/java/graphql/execution/instrumentation/Instrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/Instrumentation.java @@ -1,8 +1,8 @@ package graphql.execution.instrumentation; -import graphql.DeprecatedAt; import graphql.ExecutionInput; import graphql.ExecutionResult; +import graphql.ExperimentalApi; import graphql.PublicSpi; import graphql.execution.ExecutionContext; import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters; @@ -40,36 +40,6 @@ */ @PublicSpi public interface Instrumentation { - - /** - * This will be called just before execution to create an object that is given back to all instrumentation methods - * to allow them to have per execution request state - * - * @return a state object that is passed to each method - * - * @deprecated use {@link #createState(InstrumentationCreateStateParameters)} instead - */ - @Deprecated - 
@DeprecatedAt("2022-07-26") - default InstrumentationState createState() { - return null; - } - - /** - * This will be called just before execution to create an object that is given back to all instrumentation methods - * to allow them to have per execution request state - * - * @param parameters the parameters to this step - * - * @return a state object that is passed to each method - */ - @Deprecated - @DeprecatedAt("2023-08-25") - @Nullable - default InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return createState(); - } - /** * This will be called just before execution to create an object, in an asynchronous manner, that is given back to all instrumentation methods * to allow them to have per execution request state @@ -80,51 +50,34 @@ default InstrumentationState createState(InstrumentationCreateStateParameters pa */ @Nullable default CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { - return CompletableFuture.completedFuture(createState(parameters)); + InstrumentationState state = createState(parameters); + return state == null ? null : CompletableFuture.completedFuture(state); } /** - * This is called right at the start of query execution, and it's the first step in the instrumentation chain. + * This method is retained for backwards compatibility reasons so that previous {@link Instrumentation} implementations + * continue to work. The graphql-java code only called {@link #createStateAsync(InstrumentationCreateStateParameters)} + * but the default implementation calls back to this method. 
* * @param parameters the parameters to this step * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginExecution(InstrumentationExecutionParameters, InstrumentationState)} instead + * @return a state object that is passed to each method */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginExecution(InstrumentationExecutionParameters parameters) { - return noOp(); + @Nullable + default InstrumentationState createState(InstrumentationCreateStateParameters parameters) { + return null; } /** * This is called right at the start of query execution, and it's the first step in the instrumentation chain. * * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable default InstrumentationContext beginExecution(InstrumentationExecutionParameters parameters, InstrumentationState state) { - return beginExecution(parameters.withNewState(state)); - } - - /** - * This is called just before a query is parsed. - * - * @param parameters the parameters to this step - * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginParse(InstrumentationExecutionParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginParse(InstrumentationExecutionParameters parameters) { return noOp(); } @@ -132,28 +85,12 @@ default InstrumentationContext beginParse(InstrumentationExecutionPara * This is called just before a query is parsed. 
* * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable default InstrumentationContext beginParse(InstrumentationExecutionParameters parameters, InstrumentationState state) { - return beginParse(parameters.withNewState(state)); - } - - /** - * This is called just before the parsed query document is validated. - * - * @param parameters the parameters to this step - * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginValidation(InstrumentationValidationParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters) { return noOp(); } @@ -161,28 +98,12 @@ default InstrumentationContext> beginValidation(Instrument * This is called just before the parsed query document is validated. * * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable default InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters, InstrumentationState state) { - return beginValidation(parameters.withNewState(state)); - } - - /** - * This is called just before the execution of the query operation is started. 
- * - * @param parameters the parameters to this step - * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginExecuteOperation(InstrumentationExecuteOperationParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters) { return noOp(); } @@ -190,60 +111,54 @@ default InstrumentationContext beginExecuteOperation(Instrument * This is called just before the execution of the query operation is started. * * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable default InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState state) { - return beginExecuteOperation(parameters.withNewState(state)); + return noOp(); } /** * This is called each time an {@link graphql.execution.ExecutionStrategy} is invoked, which may be multiple times - * per query as the engine recursively descends down over the query. + * per query as the engine recursively descends over the query. 
* * @param parameters the parameters to this step + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * - * @return a non null {@link ExecutionStrategyInstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginExecutionStrategy(InstrumentationExecutionStrategyParameters, InstrumentationState)} instead + * @return a nullable {@link ExecutionStrategyInstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters) { + @Nullable + default ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { return ExecutionStrategyInstrumentationContext.NOOP; } /** - * This is called each time an {@link graphql.execution.ExecutionStrategy} is invoked, which may be multiple times - * per query as the engine recursively descends down over the query. + * This is called each time an {@link graphql.execution.ExecutionStrategy} object resolution is called, which may be multiple times + * per query as the engine recursively descends over the query. 
* * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link ExecutionStrategyInstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable - default ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { - return beginExecutionStrategy(parameters.withNewState(state)); + default ExecuteObjectInstrumentationContext beginExecuteObject(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + return ExecuteObjectInstrumentationContext.NOOP; } - /** - * This is called each time a subscription field produces a new reactive stream event value and it needs to be mapped over via the graphql field subselection. - * - * @param parameters the parameters to this step + * This is called just before a deferred field is resolved into a value. + *

+ * This is an EXPERIMENTAL instrumentation callback. The method signature will definitely change. * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * - * @deprecated use {@link #beginSubscribedFieldEvent(InstrumentationFieldParameters, InstrumentationState)} instead + * @return a nullable {@link ExecutionStrategyInstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters) { + @ExperimentalApi + default InstrumentationContext beginDeferredField(InstrumentationState state) { return noOp(); } @@ -251,28 +166,12 @@ default InstrumentationContext beginSubscribedFieldEvent(Instru * This is called each time a subscription field produces a new reactive stream event value and it needs to be mapped over via the graphql field subselection. * * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable default InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters, InstrumentationState state) { - return beginSubscribedFieldEvent(parameters.withNewState(state)); - } - - /** - * This is called just before a field is resolved into a value. 
- * - * @param parameters the parameters to this step - * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginField(InstrumentationFieldParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginField(InstrumentationFieldParameters parameters) { return noOp(); } @@ -280,58 +179,26 @@ default InstrumentationContext beginField(InstrumentationFieldP * This is called just before a field is resolved into a value. * * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable - default InstrumentationContext beginField(InstrumentationFieldParameters parameters, InstrumentationState state) { - return beginField(parameters.withNewState(state)); - } - - /** - * This is called just before a field {@link DataFetcher} is invoked. - * - * @param parameters the parameters to this step - * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginFieldFetch(InstrumentationFieldFetchParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters) { + default InstrumentationContext beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { return noOp(); } + /** * This is called just before a field {@link DataFetcher} is invoked. 
* * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable default InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters, InstrumentationState state) { - return beginFieldFetch(parameters.withNewState(state)); - } - - - /** - * This is called just before the complete field is started. - * - * @param parameters the parameters to this step - * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginFieldComplete(InstrumentationFieldCompleteParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters) { return noOp(); } @@ -339,28 +206,12 @@ default InstrumentationContext beginFieldComplete(Instrumentati * This is called just before the complete field is started. * * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable - default InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { - return beginFieldComplete(parameters.withNewState(state)); - } - - /** - * This is called just before the complete field list is started. 
- * - * @param parameters the parameters to this step - * - * @return a non null {@link InstrumentationContext} object that will be called back when the step ends - * - * @deprecated use {@link #beginFieldListComplete(InstrumentationFieldCompleteParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters) { + default InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { return noOp(); } @@ -368,31 +219,13 @@ default InstrumentationContext beginFieldListComplete(Instrumen * This is called just before the complete field list is started. * * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a nullable {@link InstrumentationContext} object that will be called back when the step ends (assuming it's not null) */ @Nullable - default InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { - return beginFieldListComplete(parameters.withNewState(state)); - } - - /** - * This is called to instrument a {@link graphql.ExecutionInput} before it is used to parse, validate - * and execute a query, allowing you to adjust what query input parameters are used - * - * @param executionInput the execution input to be used - * @param parameters the parameters describing the field to be fetched - * - * @return a non null instrumented ExecutionInput, the default is to return to the same object - * - * @deprecated use {@link #instrumentExecutionInput(ExecutionInput, InstrumentationExecutionParameters, InstrumentationState)} instead - */ - @Deprecated - 
@DeprecatedAt("2022-07-26") - @NotNull - default ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters) { - return executionInput; + default InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + return noOp(); } /** @@ -401,30 +234,13 @@ default ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, I * * @param executionInput the execution input to be used * @param parameters the parameters describing the field to be fetched - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * - * @return a non null instrumented ExecutionInput, the default is to return to the same object + * @return a non-null instrumented ExecutionInput, the default is to return to the same object */ @NotNull default ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters, InstrumentationState state) { - return instrumentExecutionInput(executionInput, parameters.withNewState(state)); - } - - /** - * This is called to instrument a {@link graphql.language.Document} and variables before it is used allowing you to adjust the query AST if you so desire - * - * @param documentAndVariables the document and variables to be used - * @param parameters the parameters describing the execution - * - * @return a non null instrumented DocumentAndVariables, the default is to return to the same objects - * - * @deprecated use {@link #instrumentDocumentAndVariables(DocumentAndVariables, InstrumentationExecutionParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default DocumentAndVariables instrumentDocumentAndVariables(DocumentAndVariables documentAndVariables, 
InstrumentationExecutionParameters parameters) { - return documentAndVariables; + return executionInput; } /** @@ -432,31 +248,13 @@ default DocumentAndVariables instrumentDocumentAndVariables(DocumentAndVariables * * @param documentAndVariables the document and variables to be used * @param parameters the parameters describing the execution - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * - * @return a non null instrumented DocumentAndVariables, the default is to return to the same objects + * @return a non-null instrumented DocumentAndVariables, the default is to return to the same objects */ @NotNull default DocumentAndVariables instrumentDocumentAndVariables(DocumentAndVariables documentAndVariables, InstrumentationExecutionParameters parameters, InstrumentationState state) { - return instrumentDocumentAndVariables(documentAndVariables, parameters.withNewState(state)); - } - - /** - * This is called to instrument a {@link graphql.schema.GraphQLSchema} before it is used to parse, validate - * and execute a query, allowing you to adjust what types are used. 
- * - * @param schema the schema to be used - * @param parameters the parameters describing the field to be fetched - * - * @return a non null instrumented GraphQLSchema, the default is to return to the same object - * - * @deprecated use {@link #instrumentSchema(GraphQLSchema, InstrumentationExecutionParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters parameters) { - return schema; + return documentAndVariables; } /** @@ -465,31 +263,13 @@ default GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExec * * @param schema the schema to be used * @param parameters the parameters describing the field to be fetched - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * - * @return a non null instrumented GraphQLSchema, the default is to return to the same object + * @return a non-null instrumented GraphQLSchema, the default is to return to the same object */ @NotNull default GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters parameters, InstrumentationState state) { - return instrumentSchema(schema, parameters.withNewState(state)); - } - - /** - * This is called to instrument a {@link ExecutionContext} before it is used to execute a query, - * allowing you to adjust the base data used. 
- * - * @param executionContext the execution context to be used - * @param parameters the parameters describing the field to be fetched - * - * @return a non null instrumented ExecutionContext, the default is to return to the same object - * - * @deprecated use {@link #instrumentExecutionContext(ExecutionContext, InstrumentationExecutionParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters) { - return executionContext; + return schema; } /** @@ -498,34 +278,13 @@ default ExecutionContext instrumentExecutionContext(ExecutionContext executionCo * * @param executionContext the execution context to be used * @param parameters the parameters describing the field to be fetched - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * - * @return a non null instrumented ExecutionContext, the default is to return to the same object + * @return a non-null instrumented ExecutionContext, the default is to return to the same object */ @NotNull default ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters, InstrumentationState state) { - return instrumentExecutionContext(executionContext, parameters.withNewState(state)); - } - - - /** - * This is called to instrument a {@link DataFetcher} just before it is used to fetch a field, allowing you - * to adjust what information is passed back or record information about specific data fetches. Note - * the same data fetcher instance maybe presented to you many times and that data fetcher - * implementations widely vary. 
- * - * @param dataFetcher the data fetcher about to be used - * @param parameters the parameters describing the field to be fetched - * - * @return a non null instrumented DataFetcher, the default is to return to the same object - * - * @deprecated use {@link #instrumentDataFetcher(DataFetcher, InstrumentationFieldFetchParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - default DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters) { - return dataFetcher; + return executionContext; } /** @@ -536,30 +295,13 @@ default DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, Instrum * * @param dataFetcher the data fetcher about to be used * @param parameters the parameters describing the field to be fetched - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * - * @return a non null instrumented DataFetcher, the default is to return to the same object + * @return a non-null instrumented DataFetcher, the default is to return to the same object */ @NotNull default DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) { - return instrumentDataFetcher(dataFetcher, parameters.withNewState(state)); - } - - /** - * This is called to allow instrumentation to instrument the execution result in some way - * - * @param executionResult {@link java.util.concurrent.CompletableFuture} of the result to instrument - * @param parameters the parameters to this step - * - * @return a new execution result completable future - * - * @deprecated use {@link #instrumentExecutionResult(ExecutionResult, InstrumentationExecutionParameters, InstrumentationState)} instead - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @NotNull - 
default CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) { - return CompletableFuture.completedFuture(executionResult); + return dataFetcher; } /** @@ -567,12 +309,12 @@ default CompletableFuture instrumentExecutionResult(ExecutionRe * * @param executionResult {@link java.util.concurrent.CompletableFuture} of the result to instrument * @param parameters the parameters to this step - * @param state the state created during the call to {@link #createState(InstrumentationCreateStateParameters)} + * @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)} * * @return a new execution result completable future */ @NotNull default CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { - return instrumentExecutionResult(executionResult, parameters.withNewState(state)); + return CompletableFuture.completedFuture(executionResult); } } diff --git a/src/main/java/graphql/execution/instrumentation/InstrumentationContext.java b/src/main/java/graphql/execution/instrumentation/InstrumentationContext.java index 2d9626a113..2bb52272f8 100644 --- a/src/main/java/graphql/execution/instrumentation/InstrumentationContext.java +++ b/src/main/java/graphql/execution/instrumentation/InstrumentationContext.java @@ -17,10 +17,8 @@ public interface InstrumentationContext { /** * This is invoked when the instrumentation step is initially dispatched - * - * @param result the result of the step as a completable future */ - void onDispatched(CompletableFuture result); + void onDispatched(); /** * This is invoked when the instrumentation step is fully completed diff --git a/src/main/java/graphql/execution/instrumentation/InstrumentationState.java b/src/main/java/graphql/execution/instrumentation/InstrumentationState.java index afea0f4afb..258c865474 100644 --- 
a/src/main/java/graphql/execution/instrumentation/InstrumentationState.java +++ b/src/main/java/graphql/execution/instrumentation/InstrumentationState.java @@ -1,12 +1,13 @@ package graphql.execution.instrumentation; import graphql.PublicSpi; +import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters; /** * An {@link Instrumentation} implementation can create this as a stateful object that is then passed * to each instrumentation method, allowing state to be passed down with the request execution * - * @see Instrumentation#createState(graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters) + * @see Instrumentation#createStateAsync(InstrumentationCreateStateParameters) */ @PublicSpi public interface InstrumentationState { diff --git a/src/main/java/graphql/execution/instrumentation/NoContextChainedInstrumentation.java b/src/main/java/graphql/execution/instrumentation/NoContextChainedInstrumentation.java index 72e0783b0c..1928df84f0 100644 --- a/src/main/java/graphql/execution/instrumentation/NoContextChainedInstrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/NoContextChainedInstrumentation.java @@ -11,6 +11,7 @@ import graphql.execution.instrumentation.parameters.InstrumentationValidationParameters; import graphql.language.Document; import graphql.validation.ValidationError; +import org.jetbrains.annotations.Nullable; import java.util.List; import java.util.function.BiConsumer; @@ -48,10 +49,7 @@ public NoContextChainedInstrumentation(Instrumentation... 
instrumentations) { } private T runAll(InstrumentationState state, BiConsumer stateConsumer) { - for (Instrumentation instrumentation : instrumentations) { - InstrumentationState specificState = getSpecificState(instrumentation, state); - stateConsumer.accept(instrumentation, specificState); - } + chainedConsume(state, stateConsumer); return null; } @@ -80,14 +78,19 @@ public ExecutionStrategyInstrumentationContext beginExecutionStrategy(Instrument return runAll(state, (instrumentation, specificState) -> instrumentation.beginExecutionStrategy(parameters, specificState)); } + @Override + public @Nullable ExecuteObjectInstrumentationContext beginExecuteObject(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + return runAll(state, (instrumentation, specificState) -> instrumentation.beginExecuteObject(parameters, specificState)); + } + @Override public InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters, InstrumentationState state) { return runAll(state, (instrumentation, specificState) -> instrumentation.beginSubscribedFieldEvent(parameters, specificState)); } @Override - public InstrumentationContext beginField(InstrumentationFieldParameters parameters, InstrumentationState state) { - return runAll(state, (instrumentation, specificState) -> instrumentation.beginField(parameters, specificState)); + public @Nullable InstrumentationContext beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { + return runAll(state, (instrumentation, specificState) -> instrumentation.beginFieldExecution(parameters, specificState)); } @Override @@ -96,13 +99,13 @@ public InstrumentationContext beginFieldFetch(InstrumentationFieldFetchP } @Override - public InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { - return runAll(state, (instrumentation, specificState) -> 
instrumentation.beginFieldComplete(parameters, specificState)); + public @Nullable InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + return runAll(state, (instrumentation, specificState) -> instrumentation.beginFieldCompletion(parameters, specificState)); } @Override - public InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { - return runAll(state, (instrumentation, specificState) -> instrumentation.beginFieldListComplete(parameters, specificState)); + public @Nullable InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + return runAll(state, (instrumentation, specificState) -> instrumentation.beginFieldListCompletion(parameters, specificState)); } // relies on the other methods from ChainedInstrumentation which this does not change diff --git a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java index f35278c551..d2df536e75 100644 --- a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java @@ -1,6 +1,5 @@ package graphql.execution.instrumentation; -import graphql.DeprecatedAt; import graphql.PublicApi; /** @@ -12,8 +11,7 @@ * @deprecated use {@link SimplePerformantInstrumentation} instead as a base class. 
*/ @PublicApi -@Deprecated -@DeprecatedAt(value = "2022-10-05") +@Deprecated(since = "2022-10-05") public class SimpleInstrumentation implements Instrumentation { /** diff --git a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java index 2621314a56..68c70b214e 100644 --- a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java +++ b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java @@ -15,7 +15,7 @@ public class SimpleInstrumentationContext implements InstrumentationContext NO_OP = new InstrumentationContext() { @Override - public void onDispatched(CompletableFuture result) { + public void onDispatched() { } @Override @@ -49,21 +49,21 @@ public static InstrumentationContext nonNullCtx(InstrumentationContext } private final BiConsumer codeToRunOnComplete; - private final Consumer> codeToRunOnDispatch; + private final Runnable codeToRunOnDispatch; public SimpleInstrumentationContext() { this(null, null); } - private SimpleInstrumentationContext(Consumer> codeToRunOnDispatch, BiConsumer codeToRunOnComplete) { + private SimpleInstrumentationContext(Runnable codeToRunOnDispatch, BiConsumer codeToRunOnComplete) { this.codeToRunOnComplete = codeToRunOnComplete; this.codeToRunOnDispatch = codeToRunOnDispatch; } @Override - public void onDispatched(CompletableFuture result) { + public void onDispatched() { if (codeToRunOnDispatch != null) { - codeToRunOnDispatch.accept(result); + codeToRunOnDispatch.run(); } } @@ -83,7 +83,7 @@ public void onCompleted(T result, Throwable t) { * * @return an instrumentation context */ - public static SimpleInstrumentationContext whenDispatched(Consumer> codeToRun) { + public static SimpleInstrumentationContext whenDispatched(Runnable codeToRun) { return new SimpleInstrumentationContext<>(codeToRun, null); } @@ -101,13 +101,8 @@ public static SimpleInstrumentationContext 
whenCompleted(BiConsumer BiConsumer completeInstrumentationCtxCF( - InstrumentationContext instrumentationContext, CompletableFuture targetCF) { + InstrumentationContext instrumentationContext) { return (result, throwable) -> { - if (throwable != null) { - targetCF.completeExceptionally(throwable); - } else { - targetCF.complete(result); - } nonNullCtx(instrumentationContext).onCompleted(result, throwable); }; } diff --git a/src/main/java/graphql/execution/instrumentation/SimplePerformantInstrumentation.java b/src/main/java/graphql/execution/instrumentation/SimplePerformantInstrumentation.java index 8ad5f8eeff..dfffa5b729 100644 --- a/src/main/java/graphql/execution/instrumentation/SimplePerformantInstrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/SimplePerformantInstrumentation.java @@ -46,16 +46,6 @@ public class SimplePerformantInstrumentation implements Instrumentation { */ public static final SimplePerformantInstrumentation INSTANCE = new SimplePerformantInstrumentation(); - @Override - public InstrumentationState createState() { - return assertShouldNeverHappen("The deprecated " + "createState" + " was called"); - } - - @Override - public @Nullable InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return null; - } - @Override public @Nullable CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { InstrumentationState state = createState(parameters); @@ -63,8 +53,8 @@ public InstrumentationState createState() { } @Override - public @NotNull InstrumentationContext beginExecution(InstrumentationExecutionParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginExecution" + " was called"); + public @Nullable InstrumentationState createState(InstrumentationCreateStateParameters parameters) { + return null; } @Override @@ -72,49 +62,29 @@ public InstrumentationState createState() { return noOp(); } - @Override - public @NotNull InstrumentationContext 
beginParse(InstrumentationExecutionParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginParse" + " was called"); - } - @Override public @Nullable InstrumentationContext beginParse(InstrumentationExecutionParameters parameters, InstrumentationState state) { return noOp(); } - @Override - public @NotNull InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginValidation" + " was called"); - } - @Override public @Nullable InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters, InstrumentationState state) { return noOp(); } - @Override - public @NotNull InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginExecuteOperation" + " was called"); - } - @Override public @Nullable InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState state) { return noOp(); } - @Override - public @NotNull ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginExecutionStrategy" + " was called"); - } - @Override public @Nullable ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { return ExecutionStrategyInstrumentationContext.NOOP; } @Override - public @NotNull InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginSubscribedFieldEvent" + " was called"); + public @Nullable ExecuteObjectInstrumentationContext beginExecuteObject(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + return ExecuteObjectInstrumentationContext.NOOP; 
} @Override @@ -123,100 +93,51 @@ public InstrumentationState createState() { } @Override - public @NotNull InstrumentationContext beginField(InstrumentationFieldParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginField" + " was called"); - } - - @Override - public @Nullable InstrumentationContext beginField(InstrumentationFieldParameters parameters, InstrumentationState state) { + public @Nullable InstrumentationContext beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { return noOp(); } - @Override - public @NotNull InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginFieldFetch" + " was called"); - } - @Override public @Nullable InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters, InstrumentationState state) { return noOp(); } - @Override - public @NotNull InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginFieldComplete" + " was called"); - } @Override - public @Nullable InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + public @Nullable InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { return noOp(); } @Override - public @NotNull InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "beginFieldListComplete" + " was called"); - } - - @Override - public @Nullable InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + public @Nullable InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, 
InstrumentationState state) { return noOp(); } - @Override - public @NotNull ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "instrumentExecutionInput" + " was called"); - } - @Override public @NotNull ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters, InstrumentationState state) { return executionInput; } - @Override - public @NotNull DocumentAndVariables instrumentDocumentAndVariables(DocumentAndVariables documentAndVariables, InstrumentationExecutionParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "instrumentDocumentAndVariables" + " was called"); - } - @Override public @NotNull DocumentAndVariables instrumentDocumentAndVariables(DocumentAndVariables documentAndVariables, InstrumentationExecutionParameters parameters, InstrumentationState state) { return documentAndVariables; } - @Override - public @NotNull GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "instrumentSchema" + " was called"); - } - @Override public @NotNull GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters parameters, InstrumentationState state) { return schema; } - @Override - public @NotNull ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "instrumentExecutionContext" + " was called"); - } - @Override public @NotNull ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters, InstrumentationState state) { return executionContext; } - @Override - public @NotNull DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, 
InstrumentationFieldFetchParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "instrumentDataFetcher" + " was called"); - } - @Override public @NotNull DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) { return dataFetcher; } - @Override - public @NotNull CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) { - return assertShouldNeverHappen("The deprecated " + "instrumentExecutionResult" + " was called"); - } - @Override public @NotNull CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { return CompletableFuture.completedFuture(executionResult); diff --git a/src/main/java/graphql/execution/instrumentation/adapters/ExecuteObjectInstrumentationContextAdapter.java b/src/main/java/graphql/execution/instrumentation/adapters/ExecuteObjectInstrumentationContextAdapter.java new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java b/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java index 87c137e303..e69de29bb2 100644 --- a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java @@ -1,186 +0,0 @@ -package graphql.execution.instrumentation.dataloader; - -import graphql.ExecutionResult; -import graphql.ExecutionResultImpl; -import graphql.PublicApi; -import graphql.collect.ImmutableKit; -import graphql.execution.AsyncExecutionStrategy; -import graphql.execution.ExecutionContext; -import graphql.execution.ExecutionStrategy; -import 
graphql.execution.instrumentation.ExecutionStrategyInstrumentationContext; -import graphql.execution.instrumentation.InstrumentationContext; -import graphql.execution.instrumentation.InstrumentationState; -import graphql.execution.instrumentation.SimplePerformantInstrumentation; -import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters; -import graphql.execution.instrumentation.parameters.InstrumentationExecuteOperationParameters; -import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; -import graphql.execution.instrumentation.parameters.InstrumentationExecutionStrategyParameters; -import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters; -import graphql.language.OperationDefinition; -import graphql.schema.DataFetcher; -import org.dataloader.DataLoader; -import org.dataloader.DataLoaderRegistry; -import org.dataloader.stats.Statistics; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.concurrent.CompletableFuture; - -import static graphql.execution.instrumentation.InstrumentationState.ofState; -import static graphql.execution.instrumentation.SimpleInstrumentationContext.noOp; - -/** - * This graphql {@link graphql.execution.instrumentation.Instrumentation} will dispatch - * all the contained {@link org.dataloader.DataLoader}s when each level of the graphql - * query is executed. - *

- * This allows you to use {@link org.dataloader.DataLoader}s in your {@link graphql.schema.DataFetcher}s - * to optimal loading of data. - *

- * A DataLoaderDispatcherInstrumentation will be automatically added to the {@link graphql.GraphQL} - * instrumentation list if one is not present. - * - * @see org.dataloader.DataLoader - * @see org.dataloader.DataLoaderRegistry - */ -@PublicApi -public class DataLoaderDispatcherInstrumentation extends SimplePerformantInstrumentation { - - private static final Logger log = LoggerFactory.getLogger(DataLoaderDispatcherInstrumentation.class); - - private final DataLoaderDispatcherInstrumentationOptions options; - - /** - * Creates a DataLoaderDispatcherInstrumentation with the default options - */ - public DataLoaderDispatcherInstrumentation() { - this(DataLoaderDispatcherInstrumentationOptions.newOptions()); - } - - /** - * Creates a DataLoaderDispatcherInstrumentation with the specified options - * - * @param options the options to control the behaviour - */ - public DataLoaderDispatcherInstrumentation(DataLoaderDispatcherInstrumentationOptions options) { - this.options = options; - } - - - @Override - public InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return new DataLoaderDispatcherInstrumentationState(log, parameters.getExecutionInput().getDataLoaderRegistry()); - } - - @Override - public @NotNull DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState rawState) { - DataLoaderDispatcherInstrumentationState state = ofState(rawState); - if (state.isAggressivelyBatching()) { - return dataFetcher; - } - // - // currently only AsyncExecutionStrategy with DataLoader and hence this allows us to "dispatch" - // on every object if it's not using aggressive batching for other execution strategies - // which allows them to work if used. 
- return (DataFetcher) environment -> { - Object obj = dataFetcher.get(environment); - immediatelyDispatch(state); - return obj; - }; - } - - private void immediatelyDispatch(DataLoaderDispatcherInstrumentationState state) { - state.getApproach().dispatch(); - } - - @Override - public @Nullable InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState rawState) { - DataLoaderDispatcherInstrumentationState state = ofState(rawState); - // - // during #instrumentExecutionInput they could have enhanced the data loader registry - // so we grab it now just before the query operation gets started - // - DataLoaderRegistry finalRegistry = parameters.getExecutionContext().getDataLoaderRegistry(); - state.setDataLoaderRegistry(finalRegistry); - if (!isDataLoaderCompatibleExecution(parameters.getExecutionContext())) { - state.setAggressivelyBatching(false); - } - return noOp(); - } - - private boolean isDataLoaderCompatibleExecution(ExecutionContext executionContext) { - // - // Currently we only support aggressive batching for the AsyncExecutionStrategy. - // This may change in the future but this is the fix for now. 
- // - OperationDefinition.Operation operation = executionContext.getOperationDefinition().getOperation(); - ExecutionStrategy strategy = executionContext.getStrategy(operation); - return (strategy instanceof AsyncExecutionStrategy); - } - - @Override - public @Nullable ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters, InstrumentationState rawState) { - DataLoaderDispatcherInstrumentationState state = ofState(rawState); - // - // if there are no data loaders, there is nothing to do - // - if (state.hasNoDataLoaders()) { - return ExecutionStrategyInstrumentationContext.NOOP; - } - return state.getApproach().beginExecutionStrategy(parameters, state.getState()); - } - - - @Override - public @Nullable InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters, InstrumentationState rawState) { - DataLoaderDispatcherInstrumentationState state = ofState(rawState); - // - // if there are no data loaders, there is nothing to do - // - if (state.hasNoDataLoaders()) { - return noOp(); - } - return state.getApproach().beginFieldFetch(parameters, state.getState()); - } - - @Override - public @NotNull CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState rawState) { - if (!options.isIncludeStatistics()) { - return CompletableFuture.completedFuture(executionResult); - } - DataLoaderDispatcherInstrumentationState state = ofState(rawState); - Map currentExt = executionResult.getExtensions(); - Map statsMap = new LinkedHashMap<>(currentExt == null ? 
ImmutableKit.emptyMap() : currentExt); - Map dataLoaderStats = buildStatsMap(state); - statsMap.put("dataloader", dataLoaderStats); - - if (log.isDebugEnabled()) { - log.debug("Data loader stats : {}", dataLoaderStats); - } - - return CompletableFuture.completedFuture(new ExecutionResultImpl(executionResult.getData(), executionResult.getErrors(), statsMap)); - } - - private Map buildStatsMap(DataLoaderDispatcherInstrumentationState state) { - DataLoaderRegistry dataLoaderRegistry = state.getDataLoaderRegistry(); - Statistics allStats = dataLoaderRegistry.getStatistics(); - Map statsMap = new LinkedHashMap<>(); - statsMap.put("overall-statistics", allStats.toMap()); - - Map individualStatsMap = new LinkedHashMap<>(); - - for (String dlKey : dataLoaderRegistry.getKeys()) { - DataLoader dl = dataLoaderRegistry.getDataLoader(dlKey); - Statistics statistics = dl.getStatistics(); - individualStatsMap.put(dlKey, statistics.toMap()); - } - - statsMap.put("individual-statistics", individualStatsMap); - - return statsMap; - } -} diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationOptions.java b/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationOptions.java deleted file mode 100644 index bde9c03bfe..0000000000 --- a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationOptions.java +++ /dev/null @@ -1,38 +0,0 @@ -package graphql.execution.instrumentation.dataloader; - -import graphql.PublicApi; - -/** - * The options that control the operation of {@link graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation} - */ -@PublicApi -public class DataLoaderDispatcherInstrumentationOptions { - - private final boolean includeStatistics; - - private DataLoaderDispatcherInstrumentationOptions(boolean includeStatistics) { - this.includeStatistics = includeStatistics; - } - - public static 
DataLoaderDispatcherInstrumentationOptions newOptions() { - return new DataLoaderDispatcherInstrumentationOptions(false); - } - - /** - * This will toggle the ability to include java-dataloader statistics into the extensions - * output of your query - * - * @param flag the switch to follow - * - * @return a new options object - */ - public DataLoaderDispatcherInstrumentationOptions includeStatistics(boolean flag) { - return new DataLoaderDispatcherInstrumentationOptions(flag); - } - - - public boolean isIncludeStatistics() { - return includeStatistics; - } - -} diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationState.java b/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationState.java deleted file mode 100644 index 1d6a697fa1..0000000000 --- a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationState.java +++ /dev/null @@ -1,91 +0,0 @@ -package graphql.execution.instrumentation.dataloader; - -import graphql.Assert; -import graphql.Internal; -import graphql.PublicApi; -import graphql.execution.instrumentation.InstrumentationState; -import org.dataloader.DataLoader; -import org.dataloader.DataLoaderRegistry; -import org.slf4j.Logger; - -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Function; - -/** - * A base class that keeps track of whether aggressive batching can be used - */ -@PublicApi -public class DataLoaderDispatcherInstrumentationState implements InstrumentationState { - - @Internal - public static final DataLoaderRegistry EMPTY_DATALOADER_REGISTRY = new DataLoaderRegistry() { - - private static final String ERROR_MESSAGE = "You MUST set in your own DataLoaderRegistry to use data loader"; - - @Override - public DataLoaderRegistry register(String key, DataLoader dataLoader) { - return Assert.assertShouldNeverHappen(ERROR_MESSAGE); - } - - @Override - public DataLoader 
computeIfAbsent(final String key, - final Function> mappingFunction) { - return Assert.assertShouldNeverHappen(ERROR_MESSAGE); - } - - @Override - public DataLoaderRegistry unregister(String key) { - return Assert.assertShouldNeverHappen(ERROR_MESSAGE); - } - }; - - private final FieldLevelTrackingApproach approach; - private final AtomicReference dataLoaderRegistry; - private final InstrumentationState state; - private volatile boolean aggressivelyBatching = true; - private volatile boolean hasNoDataLoaders; - - public DataLoaderDispatcherInstrumentationState(Logger log, DataLoaderRegistry dataLoaderRegistry) { - this.dataLoaderRegistry = new AtomicReference<>(dataLoaderRegistry); - this.approach = new FieldLevelTrackingApproach(log, this::getDataLoaderRegistry); - this.state = approach.createState(); - hasNoDataLoaders = checkForNoDataLoader(dataLoaderRegistry); - } - - private boolean checkForNoDataLoader(DataLoaderRegistry dataLoaderRegistry) { - // - // if they have never set a dataloader into the execution input then we can optimize - // away the tracking code - // - return dataLoaderRegistry == EMPTY_DATALOADER_REGISTRY; - } - - boolean isAggressivelyBatching() { - return aggressivelyBatching; - } - - void setAggressivelyBatching(boolean aggressivelyBatching) { - this.aggressivelyBatching = aggressivelyBatching; - } - - FieldLevelTrackingApproach getApproach() { - return approach; - } - - DataLoaderRegistry getDataLoaderRegistry() { - return dataLoaderRegistry.get(); - } - - void setDataLoaderRegistry(DataLoaderRegistry newRegistry) { - dataLoaderRegistry.set(newRegistry); - hasNoDataLoaders = checkForNoDataLoader(newRegistry); - } - - boolean hasNoDataLoaders() { - return hasNoDataLoaders; - } - - InstrumentationState getState() { - return state; - } -} diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/EmptyDataLoaderRegistryInstance.java b/src/main/java/graphql/execution/instrumentation/dataloader/EmptyDataLoaderRegistryInstance.java 
new file mode 100644 index 0000000000..8684ce6c38 --- /dev/null +++ b/src/main/java/graphql/execution/instrumentation/dataloader/EmptyDataLoaderRegistryInstance.java @@ -0,0 +1,30 @@ +package graphql.execution.instrumentation.dataloader; + +import graphql.Assert; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; + +import java.util.function.Function; + +public class EmptyDataLoaderRegistryInstance { + public static final DataLoaderRegistry EMPTY_DATALOADER_REGISTRY = new DataLoaderRegistry() { + // + private static final String ERROR_MESSAGE = "You MUST set in your own DataLoaderRegistry to use data loader"; + + @Override + public DataLoaderRegistry register(String key, DataLoader dataLoader) { + return Assert.assertShouldNeverHappen(ERROR_MESSAGE); + } + + @Override + public DataLoader computeIfAbsent(final String key, + final Function> mappingFunction) { + return Assert.assertShouldNeverHappen(ERROR_MESSAGE); + } + + @Override + public DataLoaderRegistry unregister(String key) { + return Assert.assertShouldNeverHappen(ERROR_MESSAGE); + } + }; +} diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/FallbackDataLoaderDispatchStrategy.java b/src/main/java/graphql/execution/instrumentation/dataloader/FallbackDataLoaderDispatchStrategy.java new file mode 100644 index 0000000000..dba4378046 --- /dev/null +++ b/src/main/java/graphql/execution/instrumentation/dataloader/FallbackDataLoaderDispatchStrategy.java @@ -0,0 +1,31 @@ +package graphql.execution.instrumentation.dataloader; + +import graphql.Internal; +import graphql.execution.DataLoaderDispatchStrategy; +import graphql.execution.ExecutionContext; +import graphql.schema.DataFetcher; + + +/** + * Used when the execution strategy is not an AsyncExecutionStrategy: simply dispatch always after each DF. 
+ */ +@Internal +public class FallbackDataLoaderDispatchStrategy implements DataLoaderDispatchStrategy { + + private final ExecutionContext executionContext; + + public FallbackDataLoaderDispatchStrategy(ExecutionContext executionContext) { + this.executionContext = executionContext; + } + + + @Override + public DataFetcher modifyDataFetcher(DataFetcher dataFetcher) { + return (DataFetcher) environment -> { + Object obj = dataFetcher.get(environment); + executionContext.getDataLoaderRegistry().dispatchAll(); + return obj; + }; + + } +} diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/FieldLevelTrackingApproach.java b/src/main/java/graphql/execution/instrumentation/dataloader/FieldLevelTrackingApproach.java deleted file mode 100644 index 7da689db9a..0000000000 --- a/src/main/java/graphql/execution/instrumentation/dataloader/FieldLevelTrackingApproach.java +++ /dev/null @@ -1,248 +0,0 @@ -package graphql.execution.instrumentation.dataloader; - -import graphql.Assert; -import graphql.ExecutionResult; -import graphql.Internal; -import graphql.execution.FieldValueInfo; -import graphql.execution.ResultPath; -import graphql.execution.instrumentation.ExecutionStrategyInstrumentationContext; -import graphql.execution.instrumentation.InstrumentationContext; -import graphql.execution.instrumentation.InstrumentationState; -import graphql.execution.instrumentation.parameters.InstrumentationExecutionStrategyParameters; -import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters; -import graphql.util.LockKit; -import org.dataloader.DataLoaderRegistry; -import org.slf4j.Logger; - -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.function.Supplier; - -/** - * This approach uses field level tracking to achieve its aims of making the data loader more efficient - */ -@Internal -public class FieldLevelTrackingApproach { - private final 
Supplier dataLoaderRegistrySupplier; - private final Logger log; - - private static class CallStack implements InstrumentationState { - - private final LockKit.ReentrantLock lock = new LockKit.ReentrantLock(); - - private final LevelMap expectedFetchCountPerLevel = new LevelMap(); - private final LevelMap fetchCountPerLevel = new LevelMap(); - private final LevelMap expectedStrategyCallsPerLevel = new LevelMap(); - private final LevelMap happenedStrategyCallsPerLevel = new LevelMap(); - private final LevelMap happenedOnFieldValueCallsPerLevel = new LevelMap(); - - private final Set dispatchedLevels = new LinkedHashSet<>(); - - CallStack() { - expectedStrategyCallsPerLevel.set(1, 1); - } - - void increaseExpectedFetchCount(int level, int count) { - expectedFetchCountPerLevel.increment(level, count); - } - - void increaseFetchCount(int level) { - fetchCountPerLevel.increment(level, 1); - } - - void increaseExpectedStrategyCalls(int level, int count) { - expectedStrategyCallsPerLevel.increment(level, count); - } - - void increaseHappenedStrategyCalls(int level) { - happenedStrategyCallsPerLevel.increment(level, 1); - } - - void increaseHappenedOnFieldValueCalls(int level) { - happenedOnFieldValueCallsPerLevel.increment(level, 1); - } - - boolean allStrategyCallsHappened(int level) { - return happenedStrategyCallsPerLevel.get(level) == expectedStrategyCallsPerLevel.get(level); - } - - boolean allOnFieldCallsHappened(int level) { - return happenedOnFieldValueCallsPerLevel.get(level) == expectedStrategyCallsPerLevel.get(level); - } - - boolean allFetchesHappened(int level) { - return fetchCountPerLevel.get(level) == expectedFetchCountPerLevel.get(level); - } - - @Override - public String toString() { - return "CallStack{" + - "expectedFetchCountPerLevel=" + expectedFetchCountPerLevel + - ", fetchCountPerLevel=" + fetchCountPerLevel + - ", expectedStrategyCallsPerLevel=" + expectedStrategyCallsPerLevel + - ", happenedStrategyCallsPerLevel=" + happenedStrategyCallsPerLevel 
+ - ", happenedOnFieldValueCallsPerLevel=" + happenedOnFieldValueCallsPerLevel + - ", dispatchedLevels" + dispatchedLevels + - '}'; - } - - public boolean dispatchIfNotDispatchedBefore(int level) { - if (dispatchedLevels.contains(level)) { - Assert.assertShouldNeverHappen("level " + level + " already dispatched"); - return false; - } - dispatchedLevels.add(level); - return true; - } - - public void clearAndMarkCurrentLevelAsReady(int level) { - expectedFetchCountPerLevel.clear(); - fetchCountPerLevel.clear(); - expectedStrategyCallsPerLevel.clear(); - happenedStrategyCallsPerLevel.clear(); - happenedOnFieldValueCallsPerLevel.clear(); - dispatchedLevels.clear(); - - // make sure the level is ready - expectedFetchCountPerLevel.increment(level, 1); - expectedStrategyCallsPerLevel.increment(level, 1); - happenedStrategyCallsPerLevel.increment(level, 1); - } - } - - public FieldLevelTrackingApproach(Logger log, Supplier dataLoaderRegistrySupplier) { - this.dataLoaderRegistrySupplier = dataLoaderRegistrySupplier; - this.log = log; - } - - public InstrumentationState createState() { - return new CallStack(); - } - - ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters, InstrumentationState rawState) { - CallStack callStack = (CallStack) rawState; - ResultPath path = parameters.getExecutionStrategyParameters().getPath(); - int parentLevel = path.getLevel(); - int curLevel = parentLevel + 1; - int fieldCount = parameters.getExecutionStrategyParameters().getFields().size(); - callStack.lock.runLocked(() -> { - callStack.increaseExpectedFetchCount(curLevel, fieldCount); - callStack.increaseHappenedStrategyCalls(curLevel); - }); - - return new ExecutionStrategyInstrumentationContext() { - @Override - public void onDispatched(CompletableFuture result) { - - } - - @Override - public void onCompleted(ExecutionResult result, Throwable t) { - - } - - @Override - public void onFieldValuesInfo(List fieldValueInfoList) { 
- boolean dispatchNeeded = callStack.lock.callLocked(() -> - handleOnFieldValuesInfo(fieldValueInfoList, callStack, curLevel) - ); - if (dispatchNeeded) { - dispatch(); - } - } - - @Override - public void onFieldValuesException() { - callStack.lock.runLocked(() -> - callStack.increaseHappenedOnFieldValueCalls(curLevel) - ); - } - }; - } - - // - // thread safety : called with synchronised(callStack) - // - private boolean handleOnFieldValuesInfo(List fieldValueInfos, CallStack callStack, int curLevel) { - callStack.increaseHappenedOnFieldValueCalls(curLevel); - int expectedStrategyCalls = getCountForList(fieldValueInfos); - callStack.increaseExpectedStrategyCalls(curLevel + 1, expectedStrategyCalls); - return dispatchIfNeeded(callStack, curLevel + 1); - } - - private int getCountForList(List fieldValueInfos) { - int result = 0; - for (FieldValueInfo fieldValueInfo : fieldValueInfos) { - if (fieldValueInfo.getCompleteValueType() == FieldValueInfo.CompleteValueType.OBJECT) { - result += 1; - } else if (fieldValueInfo.getCompleteValueType() == FieldValueInfo.CompleteValueType.LIST) { - result += getCountForList(fieldValueInfo.getFieldValueInfos()); - } - } - return result; - } - - - public InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters, InstrumentationState rawState) { - CallStack callStack = (CallStack) rawState; - ResultPath path = parameters.getEnvironment().getExecutionStepInfo().getPath(); - int level = path.getLevel(); - return new InstrumentationContext<>() { - - @Override - public void onDispatched(CompletableFuture result) { - boolean dispatchNeeded = callStack.lock.callLocked(() -> { - callStack.increaseFetchCount(level); - return dispatchIfNeeded(callStack, level); - }); - if (dispatchNeeded) { - dispatch(); - } - } - - @Override - public void onCompleted(Object result, Throwable t) { - } - }; - } - - - // - // thread safety : called with synchronised(callStack) - // - private boolean dispatchIfNeeded(CallStack 
callStack, int level) { - if (levelReady(callStack, level)) { - return callStack.dispatchIfNotDispatchedBefore(level); - } - return false; - } - - // - // thread safety : called with synchronised(callStack) - // - private boolean levelReady(CallStack callStack, int level) { - if (level == 1) { - // level 1 is special: there is only one strategy call and that's it - return callStack.allFetchesHappened(1); - } - if (levelReady(callStack, level - 1) && callStack.allOnFieldCallsHappened(level - 1) - && callStack.allStrategyCallsHappened(level) && callStack.allFetchesHappened(level)) { - return true; - } - return false; - } - - void dispatch() { - DataLoaderRegistry dataLoaderRegistry = getDataLoaderRegistry(); - if (log.isDebugEnabled()) { - log.debug("Dispatching data loaders ({})", dataLoaderRegistry.getKeys()); - } - dataLoaderRegistry.dispatchAll(); - } - - private DataLoaderRegistry getDataLoaderRegistry() { - return dataLoaderRegistrySupplier.get(); - } -} diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/LevelMap.java b/src/main/java/graphql/execution/instrumentation/dataloader/LevelMap.java index 9e151ab40b..ddf46f643b 100644 --- a/src/main/java/graphql/execution/instrumentation/dataloader/LevelMap.java +++ b/src/main/java/graphql/execution/instrumentation/dataloader/LevelMap.java @@ -1,6 +1,7 @@ package graphql.execution.instrumentation.dataloader; import graphql.Internal; + import java.util.Arrays; /** @@ -62,6 +63,16 @@ public String toString() { return result.toString(); } + public String toString(int level) { + StringBuilder result = new StringBuilder(); + result.append("IntMap["); + for (int i = 1; i <= level; i++) { + result.append("level=").append(i).append(",count=").append(countsByLevel[i]).append(" "); + } + result.append("]"); + return result.toString(); + } + public void clear() { Arrays.fill(countsByLevel, 0); } diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/PerLevelDataLoaderDispatchStrategy.java 
b/src/main/java/graphql/execution/instrumentation/dataloader/PerLevelDataLoaderDispatchStrategy.java new file mode 100644 index 0000000000..a407346954 --- /dev/null +++ b/src/main/java/graphql/execution/instrumentation/dataloader/PerLevelDataLoaderDispatchStrategy.java @@ -0,0 +1,237 @@ +package graphql.execution.instrumentation.dataloader; + +import graphql.Assert; +import graphql.Internal; +import graphql.execution.DataLoaderDispatchStrategy; +import graphql.execution.ExecutionContext; +import graphql.execution.ExecutionStrategyParameters; +import graphql.execution.FieldValueInfo; +import graphql.execution.MergedField; +import graphql.schema.DataFetcher; +import graphql.util.LockKit; +import org.dataloader.DataLoaderRegistry; + +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +@Internal +public class PerLevelDataLoaderDispatchStrategy implements DataLoaderDispatchStrategy { + + private final CallStack callStack; + private final ExecutionContext executionContext; + + + private static class CallStack { + + private final LockKit.ReentrantLock lock = new LockKit.ReentrantLock(); + private final LevelMap expectedFetchCountPerLevel = new LevelMap(); + private final LevelMap fetchCountPerLevel = new LevelMap(); + private final LevelMap expectedStrategyCallsPerLevel = new LevelMap(); + private final LevelMap happenedStrategyCallsPerLevel = new LevelMap(); + private final LevelMap happenedOnFieldValueCallsPerLevel = new LevelMap(); + + private final Set dispatchedLevels = new LinkedHashSet<>(); + + public CallStack() { + expectedStrategyCallsPerLevel.set(1, 1); + } + + void increaseExpectedFetchCount(int level, int count) { + expectedFetchCountPerLevel.increment(level, count); + } + + void increaseFetchCount(int level) { + fetchCountPerLevel.increment(level, 1); + } + + void increaseExpectedStrategyCalls(int level, int count) { + expectedStrategyCallsPerLevel.increment(level, count); + } + + void increaseHappenedStrategyCalls(int level) { 
+ happenedStrategyCallsPerLevel.increment(level, 1); + } + + void increaseHappenedOnFieldValueCalls(int level) { + happenedOnFieldValueCallsPerLevel.increment(level, 1); + } + + boolean allStrategyCallsHappened(int level) { + return happenedStrategyCallsPerLevel.get(level) == expectedStrategyCallsPerLevel.get(level); + } + + boolean allOnFieldCallsHappened(int level) { + return happenedOnFieldValueCallsPerLevel.get(level) == expectedStrategyCallsPerLevel.get(level); + } + + boolean allFetchesHappened(int level) { + return fetchCountPerLevel.get(level) == expectedFetchCountPerLevel.get(level); + } + + @Override + public String toString() { + return "CallStack{" + + "expectedFetchCountPerLevel=" + expectedFetchCountPerLevel + + ", fetchCountPerLevel=" + fetchCountPerLevel + + ", expectedStrategyCallsPerLevel=" + expectedStrategyCallsPerLevel + + ", happenedStrategyCallsPerLevel=" + happenedStrategyCallsPerLevel + + ", happenedOnFieldValueCallsPerLevel=" + happenedOnFieldValueCallsPerLevel + + ", dispatchedLevels=" + dispatchedLevels + + '}'; + } + + + public boolean dispatchIfNotDispatchedBefore(int level) { + if (dispatchedLevels.contains(level)) { + Assert.assertShouldNeverHappen("level " + level + " already dispatched"); + return false; + } + dispatchedLevels.add(level); + return true; + } + } + + public PerLevelDataLoaderDispatchStrategy(ExecutionContext executionContext) { + this.callStack = new CallStack(); + this.executionContext = executionContext; + } + + @Override + public void deferredField(ExecutionContext executionContext, MergedField currentField) { + throw new UnsupportedOperationException("Data Loaders cannot be used to resolve deferred fields"); + } + + @Override + public void executionStrategy(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + int curLevel = parameters.getExecutionStepInfo().getPath().getLevel() + 1; + increaseCallCounts(curLevel, parameters); + } + + @Override + public void
executionStrategyOnFieldValuesInfo(List fieldValueInfoList, ExecutionStrategyParameters parameters) { + int curLevel = parameters.getPath().getLevel() + 1; + onFieldValuesInfoDispatchIfNeeded(fieldValueInfoList, curLevel, parameters); + } + + public void executionStrategyOnFieldValuesException(Throwable t, ExecutionStrategyParameters executionStrategyParameters) { + int curLevel = executionStrategyParameters.getPath().getLevel() + 1; + callStack.lock.runLocked(() -> + callStack.increaseHappenedOnFieldValueCalls(curLevel) + ); + } + + + @Override + public void executeObject(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { + int curLevel = parameters.getExecutionStepInfo().getPath().getLevel() + 1; + increaseCallCounts(curLevel, parameters); + } + + @Override + public void executeObjectOnFieldValuesInfo(List fieldValueInfoList, ExecutionStrategyParameters parameters) { + int curLevel = parameters.getPath().getLevel() + 1; + onFieldValuesInfoDispatchIfNeeded(fieldValueInfoList, curLevel, parameters); + } + + + @Override + public void executeObjectOnFieldValuesException(Throwable t, ExecutionStrategyParameters parameters) { + int curLevel = parameters.getPath().getLevel() + 1; + callStack.lock.runLocked(() -> + callStack.increaseHappenedOnFieldValueCalls(curLevel) + ); + } + + + private void increaseCallCounts(int curLevel, ExecutionStrategyParameters executionStrategyParameters) { + int fieldCount = executionStrategyParameters.getFields().size(); + callStack.lock.runLocked(() -> { + callStack.increaseExpectedFetchCount(curLevel, fieldCount); + callStack.increaseHappenedStrategyCalls(curLevel); + }); + } + + private void onFieldValuesInfoDispatchIfNeeded(List fieldValueInfoList, int curLevel, ExecutionStrategyParameters parameters) { + boolean dispatchNeeded = callStack.lock.callLocked(() -> + handleOnFieldValuesInfo(fieldValueInfoList, curLevel) + ); + if (dispatchNeeded) { + dispatch(curLevel); + } + } + + // +// thread safety: called with 
callStack.lock +// + private boolean handleOnFieldValuesInfo(List fieldValueInfos, int curLevel) { + callStack.increaseHappenedOnFieldValueCalls(curLevel); + int expectedStrategyCalls = getCountForList(fieldValueInfos); + callStack.increaseExpectedStrategyCalls(curLevel + 1, expectedStrategyCalls); + return dispatchIfNeeded(curLevel + 1); + } + + private int getCountForList(List fieldValueInfos) { + int result = 0; + for (FieldValueInfo fieldValueInfo : fieldValueInfos) { + if (fieldValueInfo.getCompleteValueType() == FieldValueInfo.CompleteValueType.OBJECT) { + result += 1; + } else if (fieldValueInfo.getCompleteValueType() == FieldValueInfo.CompleteValueType.LIST) { + result += getCountForList(fieldValueInfo.getFieldValueInfos()); + } + } + return result; + } + + + @Override + public void fieldFetched(ExecutionContext executionContext, + ExecutionStrategyParameters executionStrategyParameters, + DataFetcher dataFetcher, + Object fetchedValue) { + int level = executionStrategyParameters.getPath().getLevel(); + boolean dispatchNeeded = callStack.lock.callLocked(() -> { + callStack.increaseFetchCount(level); + return dispatchIfNeeded(level); + }); + if (dispatchNeeded) { + dispatch(level); + } + + } + + + // +// thread safety : called with callStack.lock +// + private boolean dispatchIfNeeded(int level) { + boolean ready = levelReady(level); + if (ready) { + return callStack.dispatchIfNotDispatchedBefore(level); + } + return false; + } + + // +// thread safety: called with callStack.lock +// + private boolean levelReady(int level) { + if (level == 1) { + // level 1 is special: there is only one strategy call and that's it + return callStack.allFetchesHappened(1); + } + if (levelReady(level - 1) && callStack.allOnFieldCallsHappened(level - 1) + && callStack.allStrategyCallsHappened(level) && callStack.allFetchesHappened(level)) { + + return true; + } + return false; + } + + void dispatch(int level) { + DataLoaderRegistry dataLoaderRegistry = 
executionContext.getDataLoaderRegistry(); + dataLoaderRegistry.dispatchAll(); + } + +} + diff --git a/src/main/java/graphql/execution/instrumentation/fieldvalidation/SimpleFieldValidation.java b/src/main/java/graphql/execution/instrumentation/fieldvalidation/SimpleFieldValidation.java index 89634d064d..9f0a340f19 100644 --- a/src/main/java/graphql/execution/instrumentation/fieldvalidation/SimpleFieldValidation.java +++ b/src/main/java/graphql/execution/instrumentation/fieldvalidation/SimpleFieldValidation.java @@ -40,11 +40,12 @@ public SimpleFieldValidation addRule(ResultPath fieldPath, BiFunction validateFields(FieldValidationEnvironment validationEnvironment) { List errors = new ArrayList<>(); - for (ResultPath fieldPath : rules.keySet()) { + for (Map.Entry>> entry : rules.entrySet()) { + ResultPath fieldPath = entry.getKey(); + BiFunction> ruleFunction = entry.getValue(); + List fieldAndArguments = validationEnvironment.getFieldsByPath().get(fieldPath); if (fieldAndArguments != null) { - BiFunction> ruleFunction = rules.get(fieldPath); - for (FieldAndArguments fieldAndArgument : fieldAndArguments) { Optional graphQLError = ruleFunction.apply(fieldAndArgument, validationEnvironment); graphQLError.ifPresent(errors::add); diff --git a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecuteOperationParameters.java b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecuteOperationParameters.java index f8a895657b..c2b3029732 100644 --- a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecuteOperationParameters.java +++ b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecuteOperationParameters.java @@ -1,6 +1,5 @@ package graphql.execution.instrumentation.parameters; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.execution.ExecutionContext; import graphql.execution.instrumentation.Instrumentation; @@ -13,50 +12,13 @@ @PublicApi public 
class InstrumentationExecuteOperationParameters { private final ExecutionContext executionContext; - private final InstrumentationState instrumentationState; - public InstrumentationExecuteOperationParameters(ExecutionContext executionContext) { - this(executionContext, executionContext.getInstrumentationState()); - } - - private InstrumentationExecuteOperationParameters(ExecutionContext executionContext, InstrumentationState instrumentationState) { this.executionContext = executionContext; - this.instrumentationState = instrumentationState; } - /** - * Returns a cloned parameters object with the new state - * - * @param instrumentationState the new state for this parameters object - * - * @return a new parameters object with the new state - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public InstrumentationExecuteOperationParameters withNewState(InstrumentationState instrumentationState) { - return new InstrumentationExecuteOperationParameters(executionContext, instrumentationState); - } public ExecutionContext getExecutionContext() { return executionContext; } - /** - * Previously the instrumentation parameters had access to the state created via {@link Instrumentation#createState(InstrumentationCreateStateParameters)} but now - * to save object allocations, the state is passed directly into instrumentation methods - * - * @param for two - * - * @return the state created previously during a call to {@link Instrumentation#createState(InstrumentationCreateStateParameters)} - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public T getInstrumentationState() { - //noinspection unchecked - return (T) instrumentationState; - } } diff --git a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionParameters.java 
b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionParameters.java index 57ec489851..caa48ac7c5 100644 --- a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionParameters.java +++ b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionParameters.java @@ -1,6 +1,5 @@ package graphql.execution.instrumentation.parameters; -import graphql.DeprecatedAt; import graphql.ExecutionInput; import graphql.GraphQLContext; import graphql.PublicApi; @@ -22,34 +21,18 @@ public class InstrumentationExecutionParameters { private final Object context; private final GraphQLContext graphQLContext; private final Map variables; - private final InstrumentationState instrumentationState; private final GraphQLSchema schema; - public InstrumentationExecutionParameters(ExecutionInput executionInput, GraphQLSchema schema, InstrumentationState instrumentationState) { + public InstrumentationExecutionParameters(ExecutionInput executionInput, GraphQLSchema schema) { this.executionInput = executionInput; this.query = executionInput.getQuery(); this.operation = executionInput.getOperationName(); this.context = executionInput.getContext(); this.graphQLContext = executionInput.getGraphQLContext(); this.variables = executionInput.getVariables() != null ? 
executionInput.getVariables() : ImmutableKit.emptyMap(); - this.instrumentationState = instrumentationState; this.schema = schema; } - /** - * Returns a cloned parameters object with the new state - * - * @param instrumentationState the new state for this parameters object - * - * @return a new parameters object with the new state - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public InstrumentationExecutionParameters withNewState(InstrumentationState instrumentationState) { - return new InstrumentationExecutionParameters(this.getExecutionInput(), this.schema, instrumentationState); - } public ExecutionInput getExecutionInput() { return executionInput; @@ -70,8 +53,7 @@ public String getOperation() { * * @deprecated use {@link #getGraphQLContext()} instead */ - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) public T getContext() { return (T) context; @@ -85,23 +67,6 @@ public Map getVariables() { return variables; } - /** - * Previously the instrumentation parameters had access to the state created via {@link Instrumentation#createState(InstrumentationCreateStateParameters)} but now - * to save object allocations, the state is passed directly into instrumentation methods - * - * @param for two - * - * @return the state created previously during a call to {@link Instrumentation#createState(InstrumentationCreateStateParameters)} - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @SuppressWarnings("TypeParameterUnusedInFormals") - public T getInstrumentationState() { - //noinspection unchecked - return (T) instrumentationState; - } public GraphQLSchema getSchema() { return this.schema; diff --git a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionStrategyParameters.java 
b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionStrategyParameters.java index 7f1ec801b3..9c93c84d42 100644 --- a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionStrategyParameters.java +++ b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationExecutionStrategyParameters.java @@ -1,11 +1,8 @@ package graphql.execution.instrumentation.parameters; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.execution.ExecutionContext; import graphql.execution.ExecutionStrategyParameters; -import graphql.execution.instrumentation.Instrumentation; -import graphql.execution.instrumentation.InstrumentationState; /** * Parameters sent to {@link graphql.execution.instrumentation.Instrumentation} methods @@ -15,32 +12,12 @@ public class InstrumentationExecutionStrategyParameters { private final ExecutionContext executionContext; private final ExecutionStrategyParameters executionStrategyParameters; - private final InstrumentationState instrumentationState; public InstrumentationExecutionStrategyParameters(ExecutionContext executionContext, ExecutionStrategyParameters executionStrategyParameters) { - this(executionContext, executionStrategyParameters, executionContext.getInstrumentationState()); - } - - private InstrumentationExecutionStrategyParameters(ExecutionContext executionContext, ExecutionStrategyParameters executionStrategyParameters, InstrumentationState instrumentationState) { this.executionContext = executionContext; this.executionStrategyParameters = executionStrategyParameters; - this.instrumentationState = instrumentationState; } - /** - * Returns a cloned parameters object with the new state - * - * @param instrumentationState the new state for this parameters object - * - * @return a new parameters object with the new state - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public 
InstrumentationExecutionStrategyParameters withNewState(InstrumentationState instrumentationState) { - return new InstrumentationExecutionStrategyParameters(executionContext, executionStrategyParameters, instrumentationState); - } public ExecutionContext getExecutionContext() { return executionContext; @@ -50,21 +27,4 @@ public ExecutionStrategyParameters getExecutionStrategyParameters() { return executionStrategyParameters; } - /** - * Previously the instrumentation parameters had access to the state created via {@link Instrumentation#createState(InstrumentationCreateStateParameters)} but now - * to save object allocations, the state is passed directly into instrumentation methods - * - * @param for two - * - * @return the state created previously during a call to {@link Instrumentation#createState(InstrumentationCreateStateParameters)} - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @SuppressWarnings("TypeParameterUnusedInFormals") - public T getInstrumentationState() { - //noinspection unchecked - return (T) instrumentationState; - } } diff --git a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldCompleteParameters.java b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldCompleteParameters.java index 254e72f47b..1687c3d149 100644 --- a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldCompleteParameters.java +++ b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldCompleteParameters.java @@ -1,6 +1,5 @@ package graphql.execution.instrumentation.parameters; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.execution.ExecutionContext; import graphql.execution.ExecutionStepInfo; @@ -19,36 +18,15 @@ public class InstrumentationFieldCompleteParameters { private final ExecutionContext executionContext; private final Supplier executionStepInfo; private 
final Object fetchedValue; - private final InstrumentationState instrumentationState; private final ExecutionStrategyParameters executionStrategyParameters; public InstrumentationFieldCompleteParameters(ExecutionContext executionContext, ExecutionStrategyParameters executionStrategyParameters, Supplier executionStepInfo, Object fetchedValue) { - this(executionContext, executionStrategyParameters, executionStepInfo, fetchedValue, executionContext.getInstrumentationState()); - } - - InstrumentationFieldCompleteParameters(ExecutionContext executionContext, ExecutionStrategyParameters executionStrategyParameters, Supplier executionStepInfo, Object fetchedValue, InstrumentationState instrumentationState) { this.executionContext = executionContext; this.executionStrategyParameters = executionStrategyParameters; this.executionStepInfo = executionStepInfo; this.fetchedValue = fetchedValue; - this.instrumentationState = instrumentationState; } - /** - * Returns a cloned parameters object with the new state - * - * @param instrumentationState the new state for this parameters object - * - * @return a new parameters object with the new state - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public InstrumentationFieldCompleteParameters withNewState(InstrumentationState instrumentationState) { - return new InstrumentationFieldCompleteParameters( - this.executionContext, executionStrategyParameters, this.executionStepInfo, this.fetchedValue, instrumentationState); - } public ExecutionContext getExecutionContext() { @@ -63,8 +41,7 @@ public GraphQLFieldDefinition getField() { return getExecutionStepInfo().getFieldDefinition(); } - @Deprecated - @DeprecatedAt("2020-09-08") + @Deprecated(since = "2020-09-08") public ExecutionStepInfo getTypeInfo() { return getExecutionStepInfo(); } @@ -77,21 +54,4 @@ public Object getFetchedValue() { return fetchedValue; } - /** - * Previously the instrumentation 
parameters had access to the state created via {@link Instrumentation#createState(InstrumentationCreateStateParameters)} but now - * to save object allocations, the state is passed directly into instrumentation methods - * - * @param for two - * - * @return the state created previously during a call to {@link Instrumentation#createState(InstrumentationCreateStateParameters)} - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @SuppressWarnings("TypeParameterUnusedInFormals") - public T getInstrumentationState() { - //noinspection unchecked - return (T) instrumentationState; - } } diff --git a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldFetchParameters.java b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldFetchParameters.java index 6013214013..d8eedb100c 100644 --- a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldFetchParameters.java +++ b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldFetchParameters.java @@ -1,6 +1,5 @@ package graphql.execution.instrumentation.parameters; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.execution.ExecutionContext; import graphql.execution.ExecutionStrategyParameters; @@ -26,32 +25,6 @@ public InstrumentationFieldFetchParameters(ExecutionContext getExecutionContext, this.trivialDataFetcher = trivialDataFetcher; } - private InstrumentationFieldFetchParameters(ExecutionContext getExecutionContext, Supplier environment, InstrumentationState instrumentationState, ExecutionStrategyParameters executionStrategyParameters, boolean trivialDataFetcher) { - super(getExecutionContext, () -> environment.get().getExecutionStepInfo(), instrumentationState); - this.environment = environment; - this.executionStrategyParameters = executionStrategyParameters; - this.trivialDataFetcher = trivialDataFetcher; - } - - /** - * 
Returns a cloned parameters object with the new state - * - * @param instrumentationState the new state for this parameters object - * - * @return a new parameters object with the new state - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @Override - public InstrumentationFieldFetchParameters withNewState(InstrumentationState instrumentationState) { - return new InstrumentationFieldFetchParameters( - this.getExecutionContext(), this.environment, - instrumentationState, executionStrategyParameters, trivialDataFetcher); - } - - public DataFetchingEnvironment getEnvironment() { return environment.get(); } diff --git a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldParameters.java b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldParameters.java index 070def45a1..6bbfcbe9e1 100644 --- a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldParameters.java +++ b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationFieldParameters.java @@ -1,6 +1,5 @@ package graphql.execution.instrumentation.parameters; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.execution.ExecutionContext; import graphql.execution.ExecutionStepInfo; @@ -17,35 +16,10 @@ public class InstrumentationFieldParameters { private final ExecutionContext executionContext; private final Supplier executionStepInfo; - private final InstrumentationState instrumentationState; - public InstrumentationFieldParameters(ExecutionContext executionContext, Supplier executionStepInfo) { - this(executionContext, executionStepInfo, executionContext.getInstrumentationState()); - } - - InstrumentationFieldParameters(ExecutionContext executionContext, Supplier executionStepInfo, InstrumentationState instrumentationState) { this.executionContext = executionContext; this.executionStepInfo = executionStepInfo; - 
this.instrumentationState = instrumentationState; - } - - /** - * Returns a cloned parameters object with the new state - * - * @param instrumentationState the new state for this parameters object - * - * @return a new parameters object with the new state - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public InstrumentationFieldParameters withNewState(InstrumentationState instrumentationState) { - return new InstrumentationFieldParameters( - this.executionContext, this.executionStepInfo, instrumentationState); } - - public ExecutionContext getExecutionContext() { return executionContext; } @@ -58,21 +32,4 @@ public ExecutionStepInfo getExecutionStepInfo() { return executionStepInfo.get(); } - /** - * Previously the instrumentation parameters had access to the state created via {@link Instrumentation#createState(InstrumentationCreateStateParameters)} but now - * to save object allocations, the state is passed directly into instrumentation methods - * - * @param for two - * - * @return the state created previously during a call to {@link Instrumentation#createState(InstrumentationCreateStateParameters)} - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @SuppressWarnings("TypeParameterUnusedInFormals") - public T getInstrumentationState() { - //noinspection unchecked - return (T) instrumentationState; - } } diff --git a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationValidationParameters.java b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationValidationParameters.java index a99619affa..0905875f4e 100644 --- a/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationValidationParameters.java +++ b/src/main/java/graphql/execution/instrumentation/parameters/InstrumentationValidationParameters.java @@ -1,6 +1,5 @@ package 
graphql.execution.instrumentation.parameters; -import graphql.DeprecatedAt; import graphql.ExecutionInput; import graphql.PublicApi; import graphql.execution.instrumentation.Instrumentation; @@ -15,28 +14,11 @@ public class InstrumentationValidationParameters extends InstrumentationExecutionParameters { private final Document document; - public InstrumentationValidationParameters(ExecutionInput executionInput, Document document, GraphQLSchema schema, InstrumentationState instrumentationState) { - super(executionInput, schema, instrumentationState); + public InstrumentationValidationParameters(ExecutionInput executionInput, Document document, GraphQLSchema schema) { + super(executionInput, schema); this.document = document; } - /** - * Returns a cloned parameters object with the new state - * - * @param instrumentationState the new state for this parameters object - * - * @return a new parameters object with the new state - * - * @deprecated state is now passed in direct to instrumentation methods - */ - @Deprecated - @DeprecatedAt("2022-07-26") - @Override - public InstrumentationValidationParameters withNewState(InstrumentationState instrumentationState) { - return new InstrumentationValidationParameters( - this.getExecutionInput(), document, getSchema(), instrumentationState); - } - public Document getDocument() { return document; diff --git a/src/main/java/graphql/execution/instrumentation/tracing/TracingInstrumentation.java b/src/main/java/graphql/execution/instrumentation/tracing/TracingInstrumentation.java index 9750ec1475..daa67d4ea7 100644 --- a/src/main/java/graphql/execution/instrumentation/tracing/TracingInstrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/tracing/TracingInstrumentation.java @@ -72,8 +72,8 @@ public TracingInstrumentation(Options options) { private final Options options; @Override - public @Nullable InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return new 
TracingSupport(options.includeTrivialDataFetchers); + public @Nullable CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { + return CompletableFuture.completedFuture(new TracingSupport(options.includeTrivialDataFetchers)); } @Override diff --git a/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java b/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java index 912b850db0..03a96776b6 100644 --- a/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java +++ b/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java @@ -4,6 +4,7 @@ import graphql.ExecutionInput; import graphql.Internal; +import java.util.concurrent.CompletableFuture; import java.util.function.Function; @Internal @@ -11,7 +12,7 @@ public class NoOpPreparsedDocumentProvider implements PreparsedDocumentProvider public static final NoOpPreparsedDocumentProvider INSTANCE = new NoOpPreparsedDocumentProvider(); @Override - public PreparsedDocumentEntry getDocument(ExecutionInput executionInput, Function parseAndValidateFunction) { - return parseAndValidateFunction.apply(executionInput); + public CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { + return CompletableFuture.completedFuture(parseAndValidateFunction.apply(executionInput)); } } diff --git a/src/main/java/graphql/execution/preparsed/PreparsedDocumentProvider.java b/src/main/java/graphql/execution/preparsed/PreparsedDocumentProvider.java index a9d8f1e842..7aac05d09d 100644 --- a/src/main/java/graphql/execution/preparsed/PreparsedDocumentProvider.java +++ b/src/main/java/graphql/execution/preparsed/PreparsedDocumentProvider.java @@ -1,7 +1,6 @@ package graphql.execution.preparsed; -import graphql.DeprecatedAt; import graphql.ExecutionInput; import graphql.PublicSpi; @@ -13,24 +12,6 @@ */ @PublicSpi public interface PreparsedDocumentProvider { - /** - * This is called to get a 
"cached" pre-parsed query and if it's not present, then the "parseAndValidateFunction" - * can be called to parse and validate the query. - *

- * Note - the "parseAndValidateFunction" MUST be called if you don't have a per parsed version of the query because it not only parses - * and validates the query, it invokes {@link graphql.execution.instrumentation.Instrumentation} calls as well for parsing and validation. - * if you don't make a call back on this then these wont happen. - * - * @param executionInput The {@link graphql.ExecutionInput} containing the query - * @param parseAndValidateFunction If the query has not be pre-parsed, this function MUST be called to parse and validate it - * @return an instance of {@link PreparsedDocumentEntry} - *

- * @deprecated - use {@link #getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction)} - */ - @Deprecated - @DeprecatedAt("2021-12-06") - PreparsedDocumentEntry getDocument(ExecutionInput executionInput, Function parseAndValidateFunction); - /** * This is called to get a "cached" pre-parsed query and if it's not present, then the "parseAndValidateFunction" * can be called to parse and validate the query. @@ -43,9 +24,7 @@ public interface PreparsedDocumentProvider { * @param parseAndValidateFunction If the query has not be pre-parsed, this function MUST be called to parse and validate it * @return a promise to an {@link PreparsedDocumentEntry} */ - default CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { - return CompletableFuture.completedFuture(getDocument(executionInput, parseAndValidateFunction)); - } + CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction); } diff --git a/src/main/java/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCache.java b/src/main/java/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCache.java index d8f6dd458c..5226332b85 100644 --- a/src/main/java/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCache.java +++ b/src/main/java/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCache.java @@ -7,6 +7,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; /** @@ -27,8 +28,8 @@ public Map getKnownQueries() { } @Override - public PreparsedDocumentEntry getPersistedQueryDocument(Object persistedQueryId, ExecutionInput executionInput, PersistedQueryCacheMiss onCacheMiss) throws PersistedQueryNotFound { - return cache.compute(persistedQueryId, (k, v) -> { + public CompletableFuture getPersistedQueryDocumentAsync(Object persistedQueryId, ExecutionInput executionInput, 
PersistedQueryCacheMiss onCacheMiss) throws PersistedQueryNotFound { + PreparsedDocumentEntry documentEntry = cache.compute(persistedQueryId, (k, v) -> { if (v != null) { return v; } @@ -45,6 +46,7 @@ public PreparsedDocumentEntry getPersistedQueryDocument(Object persistedQueryId, } return onCacheMiss.apply(queryText); }); + return CompletableFuture.completedFuture(documentEntry); } public static Builder newInMemoryPersistedQueryCache() { diff --git a/src/main/java/graphql/execution/preparsed/persisted/PersistedQueryCache.java b/src/main/java/graphql/execution/preparsed/persisted/PersistedQueryCache.java index 034e4b4ebb..7690280e78 100644 --- a/src/main/java/graphql/execution/preparsed/persisted/PersistedQueryCache.java +++ b/src/main/java/graphql/execution/preparsed/persisted/PersistedQueryCache.java @@ -1,6 +1,5 @@ package graphql.execution.preparsed.persisted; -import graphql.DeprecatedAt; import graphql.ExecutionInput; import graphql.PublicSpi; import graphql.execution.preparsed.PreparsedDocumentEntry; @@ -12,30 +11,6 @@ */ @PublicSpi public interface PersistedQueryCache { - - /** - * This is called to get a persisted query from cache. - *

- * If its present in cache then it must return a PreparsedDocumentEntry where {@link graphql.execution.preparsed.PreparsedDocumentEntry#getDocument()} - * is already parsed and validated. This will be passed onto the graphql engine as is. - *

- * If it's a valid query id but its no present in cache, (cache miss) then you need to call back the "onCacheMiss" function with associated query text. - * This will be compiled and validated by the graphql engine and the PreparsedDocumentEntry will be passed back ready for you to cache it. - *

- * If it's not a valid query id then throw a {@link graphql.execution.preparsed.persisted.PersistedQueryNotFound} to indicate this. - * - * @param persistedQueryId the persisted query id - * @param executionInput the original execution input - * @param onCacheMiss the call back should it be a valid query id but it's not currently in the cache - * @return a parsed and validated PreparsedDocumentEntry where {@link graphql.execution.preparsed.PreparsedDocumentEntry#getDocument()} is set - * @throws graphql.execution.preparsed.persisted.PersistedQueryNotFound if the query id is not know at all and you have no query text - * - * @deprecated - use {@link #getPersistedQueryDocumentAsync(Object persistedQueryId, ExecutionInput executionInput, PersistedQueryCacheMiss onCacheMiss)} - */ - @Deprecated - @DeprecatedAt("2021-12-06") - PreparsedDocumentEntry getPersistedQueryDocument(Object persistedQueryId, ExecutionInput executionInput, PersistedQueryCacheMiss onCacheMiss) throws PersistedQueryNotFound; - /** * This is called to get a persisted query from cache. *

@@ -53,7 +28,5 @@ public interface PersistedQueryCache { * @return a promise to parsed and validated {@link PreparsedDocumentEntry} where {@link graphql.execution.preparsed.PreparsedDocumentEntry#getDocument()} is set * @throws graphql.execution.preparsed.persisted.PersistedQueryNotFound if the query id is not know at all and you have no query text */ - default CompletableFuture getPersistedQueryDocumentAsync(Object persistedQueryId, ExecutionInput executionInput, PersistedQueryCacheMiss onCacheMiss) throws PersistedQueryNotFound{ - return CompletableFuture.completedFuture(getPersistedQueryDocument(persistedQueryId, executionInput, onCacheMiss)); - } + CompletableFuture getPersistedQueryDocumentAsync(Object persistedQueryId, ExecutionInput executionInput, PersistedQueryCacheMiss onCacheMiss) throws PersistedQueryNotFound; } diff --git a/src/main/java/graphql/execution/preparsed/persisted/PersistedQuerySupport.java b/src/main/java/graphql/execution/preparsed/persisted/PersistedQuerySupport.java index 816e033f66..b65e20b78a 100644 --- a/src/main/java/graphql/execution/preparsed/persisted/PersistedQuerySupport.java +++ b/src/main/java/graphql/execution/preparsed/persisted/PersistedQuerySupport.java @@ -8,9 +8,11 @@ import graphql.execution.preparsed.PreparsedDocumentProvider; import java.util.Optional; +import java.util.concurrent.CompletableFuture; import java.util.function.Function; import static graphql.Assert.assertNotNull; +import static java.util.concurrent.CompletableFuture.completedFuture; /** * This abstract class forms the basis for persistent query support. 
Derived classes @@ -36,16 +38,16 @@ public PersistedQuerySupport(PersistedQueryCache persistedQueryCache) { } @Override - public PreparsedDocumentEntry getDocument(ExecutionInput executionInput, Function parseAndValidateFunction) { + public CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { Optional queryIdOption = getPersistedQueryId(executionInput); - assertNotNull(queryIdOption, () -> String.format("The class %s MUST return a non null optional query id", this.getClass().getName())); + assertNotNull(queryIdOption, "The class %s MUST return a non null optional query id", this.getClass().getName()); try { if (queryIdOption.isPresent()) { Object persistedQueryId = queryIdOption.get(); - return persistedQueryCache.getPersistedQueryDocument(persistedQueryId, executionInput, (queryText) -> { + return persistedQueryCache.getPersistedQueryDocumentAsync(persistedQueryId, executionInput, (queryText) -> { // we have a miss and they gave us nothing - bah! 
- if (queryText == null || queryText.trim().length() == 0) { + if (queryText == null || queryText.isBlank()) { throw new PersistedQueryNotFound(persistedQueryId); } // validate the queryText hash before returning to the cache which we assume will set it @@ -57,9 +59,9 @@ public PreparsedDocumentEntry getDocument(ExecutionInput executionInput, Functio }); } // ok there is no query id - we assume the query is indeed ready to go as is - ie its not a persisted query - return parseAndValidateFunction.apply(executionInput); + return completedFuture(parseAndValidateFunction.apply(executionInput)); } catch (PersistedQueryError e) { - return mkMissingError(e); + return completedFuture(mkMissingError(e)); } } diff --git a/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java b/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java index 49f6fde6ee..38c6eb1238 100644 --- a/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java +++ b/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java @@ -2,6 +2,7 @@ import graphql.Internal; +import org.jetbrains.annotations.NotNull; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; @@ -12,37 +13,38 @@ /** * Executor that provides mutual exclusion between the operations submitted to it, * without blocking. - * + *

* If an operation is submitted to this executor while no other operation is * running, it will run immediately. - * + *

* If an operation is submitted to this executor while another operation is * running, it will be added to a queue of operations to run, and the executor will * return. The thread currently running an operation will end up running the * operation just submitted. - * + *

* Operations submitted to this executor should run fast, as they can end up running * on other threads and interfere with the operation of other threads. - * + *

* This executor can also be used to address infinite recursion problems, as * operations submitted recursively will run sequentially. + *

* - * - * Inspired by Public Domain CC0 code at h - * https://github.com/jroper/reactive-streams-servlet/tree/master/reactive-streams-servlet/src/main/java/org/reactivestreams/servlet + * Inspired by Public Domain CC0 code at + * ... */ @Internal class NonBlockingMutexExecutor implements Executor { private final AtomicReference last = new AtomicReference<>(); @Override - public void execute(final Runnable command) { + public void execute(final @NotNull Runnable command) { final RunNode newNode = new RunNode(assertNotNull(command, () -> "Runnable must not be null")); final RunNode prevLast = last.getAndSet(newNode); - if (prevLast != null) + if (prevLast != null) { prevLast.lazySet(newNode); - else + } else { runAll(newNode); + } } private void reportFailure(final Thread runner, final Throwable thrown) { diff --git a/src/main/java/graphql/incremental/DeferPayload.java b/src/main/java/graphql/incremental/DeferPayload.java new file mode 100644 index 0000000000..58e5ac0994 --- /dev/null +++ b/src/main/java/graphql/incremental/DeferPayload.java @@ -0,0 +1,81 @@ +package graphql.incremental; + +import graphql.ExecutionResult; +import graphql.ExperimentalApi; +import graphql.GraphQLError; + +import javax.annotation.Nullable; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * Represents a defer payload + */ +@ExperimentalApi +public class DeferPayload extends IncrementalPayload { + private final Object data; + + private DeferPayload(Object data, List path, String label, List errors, Map extensions) { + super(path, label, errors, extensions); + this.data = data; + } + + /** + * @return the resolved data + * @param the type to cast the result to + */ + @Nullable + public T getData() { + //noinspection unchecked + return (T) this.data; + } + + /** + * @return a map of this payload that strictly follows the spec + */ + @Override + public Map toSpecification() { + Map map = new LinkedHashMap<>(super.toSpecification()); + + if (data != 
null) { + map.put("data", data); + } + + return map; + } + + /** + * @return a {@link DeferPayload.Builder} that can be used to create an instance of {@link DeferPayload} + */ + public static DeferPayload.Builder newDeferredItem() { + return new DeferPayload.Builder(); + } + + public static class Builder extends IncrementalPayload.Builder { + private Object data = null; + + public Builder data(Object data) { + this.data = data; + return this; + } + + public Builder from(DeferPayload deferredItem) { + super.from(deferredItem); + this.data = deferredItem.data; + return this; + } + + public Builder from(ExecutionResult executionResult) { + this.data = executionResult.getData(); + this.errors = executionResult.getErrors(); + this.extensions = executionResult.getExtensions(); + + return this; + } + + public DeferPayload build() { + return new DeferPayload(data, this.path, this.label, this.errors, this.extensions); + } + } +} diff --git a/src/main/java/graphql/incremental/DelayedIncrementalPartialResult.java b/src/main/java/graphql/incremental/DelayedIncrementalPartialResult.java new file mode 100644 index 0000000000..706944e528 --- /dev/null +++ b/src/main/java/graphql/incremental/DelayedIncrementalPartialResult.java @@ -0,0 +1,43 @@ +package graphql.incremental; + +import graphql.ExperimentalApi; + +import javax.annotation.Nullable; +import java.util.List; +import java.util.Map; + +/** + * Represents a result that is delivered asynchronously, after the initial {@link IncrementalExecutionResult}. + *

+ * Multiple defer and/or stream payloads (represented by {@link IncrementalPayload}) can be part of the same + * {@link DelayedIncrementalPartialResult} + */ +@ExperimentalApi +public interface DelayedIncrementalPartialResult { + /** + * @return a list of defer and/or stream payloads. + */ + @Nullable + List getIncremental(); + + /** + * Indicates whether the stream will continue emitting {@link DelayedIncrementalPartialResult}s after this one. + *

+ * The value returned by this method should be "true" for all but the last response in the stream. The value of this + * entry is `false` for the last response of the stream. + * + * @return "true" if there are more responses in the stream, "false" otherwise. + */ + boolean hasNext(); + + /** + * @return a map of extensions or null if there are none + */ + @Nullable + Map getExtensions(); + + /** + * @return a map of the result that strictly follows the spec + */ + Map toSpecification(); +} diff --git a/src/main/java/graphql/incremental/DelayedIncrementalPartialResultImpl.java b/src/main/java/graphql/incremental/DelayedIncrementalPartialResultImpl.java new file mode 100644 index 0000000000..461d658e7d --- /dev/null +++ b/src/main/java/graphql/incremental/DelayedIncrementalPartialResultImpl.java @@ -0,0 +1,87 @@ +package graphql.incremental; + +import graphql.ExperimentalApi; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +@ExperimentalApi +public class DelayedIncrementalPartialResultImpl implements DelayedIncrementalPartialResult { + private final List incrementalItems; + private final boolean hasNext; + private final Map extensions; + + private DelayedIncrementalPartialResultImpl(Builder builder) { + this.incrementalItems = builder.incrementalItems; + this.hasNext = builder.hasNext; + this.extensions = builder.extensions; + } + + @Override + public List getIncremental() { + return this.incrementalItems; + } + + @Override + public boolean hasNext() { + return this.hasNext; + } + + @Override + public Map getExtensions() { + return this.extensions; + } + + @Override + public Map toSpecification() { + Map result = new LinkedHashMap<>(); + result.put("hasNext", hasNext); + + if (extensions != null) { + result.put("extensions", extensions); + } + + if(incrementalItems != null) { + result.put("incremental", incrementalItems.stream() + 
.map(IncrementalPayload::toSpecification) + .collect(Collectors.toList())); + } + + return result; + } + + /** + * @return a {@link Builder} that can be used to create an instance of {@link DelayedIncrementalPartialResultImpl} + */ + public static Builder newIncrementalExecutionResult() { + return new Builder(); + } + + public static class Builder { + private boolean hasNext = false; + private List incrementalItems = Collections.emptyList(); + private Map extensions; + + public Builder hasNext(boolean hasNext) { + this.hasNext = hasNext; + return this; + } + + public Builder incrementalItems(List incrementalItems) { + this.incrementalItems = incrementalItems; + return this; + } + + public Builder extensions(Map extensions) { + this.extensions = extensions; + return this; + } + + public DelayedIncrementalPartialResultImpl build() { + return new DelayedIncrementalPartialResultImpl(this); + } + } +} diff --git a/src/main/java/graphql/incremental/IncrementalExecutionResult.java b/src/main/java/graphql/incremental/IncrementalExecutionResult.java new file mode 100644 index 0000000000..b3dc1b929e --- /dev/null +++ b/src/main/java/graphql/incremental/IncrementalExecutionResult.java @@ -0,0 +1,111 @@ +package graphql.incremental; + +import graphql.ExecutionResult; +import graphql.ExperimentalApi; +import org.reactivestreams.Publisher; + +import javax.annotation.Nullable; +import java.util.List; + +/** + * A result that is part of an execution that includes incrementally delivered data (data has been deferred of streamed). + *

+ * For example, this query + *

+ * query {
+ *   person(id: "cGVvcGxlOjE=") {
+ *     ...HomeWorldFragment @defer(label: "homeWorldDefer")
+ *     name
+ *     films @stream(initialCount: 1, label: "filmsStream") {
+ *       title
+ *     }
+ *   }
+ * }
+ * fragment HomeWorldFragment on Person {
+ *   homeWorld {
+ *     name
+ *   }
+ * }
+ * 
+ * Could result on an incremental response with the following payloads (in JSON format here for simplicity). + *

+ * Response 1, the initial response does not contain any deferred or streamed results. + *

+ * {
+ *   "data": {
+ *     "person": {
+ *       "name": "Luke Skywalker",
+ *       "films": [{ "title": "A New Hope" }]
+ *     }
+ *   },
+ *   "hasNext": true
+ * }
+ * 
+ * + * Response 2, contains the defer payload and the first stream payload. + *
+ * {
+ *   "incremental": [
+ *     {
+ *       "label": "homeWorldDefer",
+ *       "path": ["person"],
+ *       "data": { "homeWorld": { "name": "Tatooine" } }
+ *     },
+ *     {
+ *       "label": "filmsStream",
+ *       "path": ["person", "films", 1],
+ *       "items": [{ "title": "The Empire Strikes Back" }]
+ *     }
+ *   ],
+ *   "hasNext": true
+ * }
+ * 
+ * + * Response 3, contains the final stream payload. Note how "hasNext" is "false", indicating this is the final response. + *
+ * {
+ *   "incremental": [
+ *     {
+ *       "label": "filmsStream",
+ *       "path": ["person", "films", 2],
+ *       "items": [{ "title": "Return of the Jedi" }]
+ *     }
+ *   ],
+ *   "hasNext": false
+ * }
+ * 
+ * + *

+ * This implementation is based on the state of Defer/Stream PR + * More specifically at the state of this + * commit + *

+ * The execution behaviour should match what we get from running Apollo Server 4.9.5 with graphql-js v17.0.0-alpha.2 + */ +@ExperimentalApi +public interface IncrementalExecutionResult extends ExecutionResult { + /** + * Indicates whether there are pending incremental data. + * @return "true" if there are incremental data, "false" otherwise. + */ + boolean hasNext(); + + /** + * Returns a list of defer and/or stream payloads that the execution engine decided (for whatever reason) to resolve at the same time as the initial payload. + *

+ * (...)this field may appear on both the initial and subsequent values. + *

+ * source + * + * @return a list of Stream and/or Defer payloads that were resolved at the same time as the initial payload. + */ + @Nullable + List getIncremental(); + + /** + * This {@link Publisher} will asynchronously emit events containing defer and/or stream payloads. + * + * @return a {@link Publisher} that clients can subscribe to receive incremental payloads. + */ + Publisher getIncrementalItemPublisher(); +} diff --git a/src/main/java/graphql/incremental/IncrementalExecutionResultImpl.java b/src/main/java/graphql/incremental/IncrementalExecutionResultImpl.java new file mode 100644 index 0000000000..765453260d --- /dev/null +++ b/src/main/java/graphql/incremental/IncrementalExecutionResultImpl.java @@ -0,0 +1,101 @@ +package graphql.incremental; + +import graphql.ExecutionResult; +import graphql.ExecutionResultImpl; +import graphql.ExperimentalApi; +import org.reactivestreams.Publisher; + +import javax.annotation.Nullable; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +@ExperimentalApi +public class IncrementalExecutionResultImpl extends ExecutionResultImpl implements IncrementalExecutionResult { + private final boolean hasNext; + private final List incremental; + private final Publisher incrementalItemPublisher; + + private IncrementalExecutionResultImpl(Builder builder) { + super(builder); + this.hasNext = builder.hasNext; + this.incremental = builder.incremental; + this.incrementalItemPublisher = builder.incrementalItemPublisher; + } + + @Override + public boolean hasNext() { + return this.hasNext; + } + + @Nullable + @Override + public List getIncremental() { + return this.incremental; + } + + @Override + public Publisher getIncrementalItemPublisher() { + return incrementalItemPublisher; + } + + /** + * @return a {@link Builder} that can be used to create an instance of {@link IncrementalExecutionResultImpl} + */ + public static Builder 
newIncrementalExecutionResult() { + return new Builder(); + } + + public static Builder fromExecutionResult(ExecutionResult executionResult) { + return new Builder().from(executionResult); + } + + @Override + public Map toSpecification() { + Map map = new LinkedHashMap<>(super.toSpecification()); + map.put("hasNext", hasNext); + + if (this.incremental != null) { + map.put("incremental", + this.incremental.stream() + .map(IncrementalPayload::toSpecification) + .collect(Collectors.toCollection(LinkedList::new)) + ); + } + + return map; + } + + public static class Builder extends ExecutionResultImpl.Builder { + private boolean hasNext = true; + public List incremental; + private Publisher incrementalItemPublisher; + + public Builder hasNext(boolean hasNext) { + this.hasNext = hasNext; + return this; + } + + public Builder incremental(List incremental) { + this.incremental = incremental; + return this; + } + + public Builder incrementalItemPublisher(Publisher incrementalItemPublisher) { + this.incrementalItemPublisher = incrementalItemPublisher; + return this; + } + + public Builder from(IncrementalExecutionResult incrementalExecutionResult) { + super.from(incrementalExecutionResult); + this.hasNext = incrementalExecutionResult.hasNext(); + return this; + } + + public IncrementalExecutionResult build() { + return new IncrementalExecutionResultImpl(this); + } + } +} diff --git a/src/main/java/graphql/incremental/IncrementalPayload.java b/src/main/java/graphql/incremental/IncrementalPayload.java new file mode 100644 index 0000000000..76b1fbb405 --- /dev/null +++ b/src/main/java/graphql/incremental/IncrementalPayload.java @@ -0,0 +1,146 @@ +package graphql.incremental; + +import graphql.ExecutionResult; +import graphql.ExperimentalApi; +import graphql.GraphQLError; +import graphql.execution.ResultPath; + +import javax.annotation.Nullable; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import static 
java.util.stream.Collectors.toList; + +/** + * Represents a payload that can be resolved after the initial response. + */ +@ExperimentalApi +public abstract class IncrementalPayload { + private final List path; + private final String label; + private final List errors; + private final transient Map extensions; + + protected IncrementalPayload(List path, String label, List errors, Map extensions) { + this.path = path; + this.errors = errors; + this.label = label; + this.extensions = extensions; + } + + /** + * @return list of field names and indices from root to the location of the corresponding `@defer` or `@stream` directive. + */ + public List getPath() { + return this.path; + } + + /** + * @return value derived from the corresponding `@defer` or `@stream` directive. + */ + @Nullable + public String getLabel() { + return label; + } + + /** + * @return a list of field errors encountered during execution. + */ + @Nullable + public List getErrors() { + return this.errors; + } + + /** + * @return a map of extensions or null if there are none + */ + @Nullable + public Map getExtensions() { + return this.extensions; + } + + protected Map toSpecification() { + Map result = new LinkedHashMap<>(); + + result.put("path", path); + + if (label != null) { + result.put("label", label); + } + + if (errors != null && !errors.isEmpty()) { + result.put("errors", errorsToSpec(errors)); + } + if (extensions != null) { + result.put("extensions", extensions); + } + return result; + } + + protected Object errorsToSpec(List errors) { + return errors.stream().map(GraphQLError::toSpecification).collect(toList()); + } + + + protected static abstract class Builder> { + protected List path; + protected String label; + protected List errors = new ArrayList<>(); + protected Map extensions; + + public T from(IncrementalPayload incrementalPayload) { + this.path = incrementalPayload.getPath(); + this.label = incrementalPayload.getLabel(); + if (incrementalPayload.getErrors() != null) { + 
this.errors = new ArrayList<>(incrementalPayload.getErrors()); + } + this.extensions = incrementalPayload.getExtensions(); + return (T) this; + } + + public T path(ResultPath path) { + if (path != null) { + this.path = path.toList(); + } + return (T) this; + } + + public T path(List path) { + this.path = path; + return (T) this; + } + + public T label(String label) { + this.label = label; + return (T) this; + } + + public T errors(List errors) { + this.errors = errors; + return (T) this; + } + + public Builder addErrors(List errors) { + this.errors.addAll(errors); + return this; + } + + public Builder addError(GraphQLError error) { + this.errors.add(error); + return this; + } + + public Builder extensions(Map extensions) { + this.extensions = extensions; + return this; + } + + public Builder addExtension(String key, Object value) { + this.extensions = (this.extensions == null ? new LinkedHashMap<>() : this.extensions); + this.extensions.put(key, value); + return this; + } + } +} diff --git a/src/main/java/graphql/incremental/StreamPayload.java b/src/main/java/graphql/incremental/StreamPayload.java new file mode 100644 index 0000000000..e8bdfcf85c --- /dev/null +++ b/src/main/java/graphql/incremental/StreamPayload.java @@ -0,0 +1,72 @@ +package graphql.incremental; + +import graphql.ExperimentalApi; +import graphql.GraphQLError; + +import javax.annotation.Nullable; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * Represents a stream payload + */ +@ExperimentalApi +public class StreamPayload extends IncrementalPayload { + private final List items; + + private StreamPayload(List items, List path, String label, List errors, Map extensions) { + super(path, label, errors, extensions); + this.items = items; + } + + /** + * @return the resolved list of items + * @param the type to cast the result to + */ + @Nullable + public List getItems() { + //noinspection unchecked + return (List) this.items; + } + + /** + * @return a map of 
this payload that strictly follows the spec + */ + @Override + public Map toSpecification() { + Map map = new LinkedHashMap<>(super.toSpecification()); + + if (items != null) { + map.put("items", items); + } + + return map; + } + + /** + * @return a {@link Builder} that can be used to create an instance of {@link StreamPayload} + */ + public static StreamPayload.Builder newStreamedItem() { + return new StreamPayload.Builder(); + } + + public static class Builder extends IncrementalPayload.Builder { + private List items = null; + + public Builder items(List items) { + this.items = items; + return this; + } + + public Builder from(StreamPayload streamedItem) { + super.from(streamedItem); + this.items = streamedItem.items; + return this; + } + + public StreamPayload build() { + return new StreamPayload(items, this.path, this.label, this.errors, this.extensions); + } + } +} diff --git a/src/main/java/graphql/introspection/GoodFaithIntrospection.java b/src/main/java/graphql/introspection/GoodFaithIntrospection.java new file mode 100644 index 0000000000..bd7285cbd1 --- /dev/null +++ b/src/main/java/graphql/introspection/GoodFaithIntrospection.java @@ -0,0 +1,177 @@ +package graphql.introspection; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import graphql.ErrorClassification; +import graphql.ExecutionResult; +import graphql.GraphQLContext; +import graphql.GraphQLError; +import graphql.PublicApi; +import graphql.execution.AbortExecutionException; +import graphql.execution.ExecutionContext; +import graphql.language.SourceLocation; +import graphql.normalized.ExecutableNormalizedField; +import graphql.normalized.ExecutableNormalizedOperation; +import graphql.schema.FieldCoordinates; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; + +import static graphql.normalized.ExecutableNormalizedOperationFactory.Options; +import static 
graphql.normalized.ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation; +import static graphql.schema.FieldCoordinates.coordinates; + +/** + * This {@link graphql.execution.instrumentation.Instrumentation} ensure that a submitted introspection query is done in + * good faith. + *

+ * There are attack vectors where a crafted introspection query can cause the engine to spend too much time + * producing introspection data. This is especially true on large schemas with lots of types and fields. + *

+ * Schemas form a cyclic graph and hence it's possible to send in introspection queries that can reference those cycles + * and in large schemas this can be expensive and perhaps a "denial of service". + *

+ * This instrumentation only allows one __schema field or one __type field to be present, and it does not allow the `__Type` fields + * to form a cycle, i.e., that can only be present once. This allows the standard and common introspection queries to work + * so tooling such as graphiql can work. + */ +@PublicApi +public class GoodFaithIntrospection { + + /** + * Placing a boolean value under this key in the per request {@link GraphQLContext} will enable + * or disable Good Faith Introspection on that request. + */ + public static final String GOOD_FAITH_INTROSPECTION_DISABLED = "GOOD_FAITH_INTROSPECTION_DISABLED"; + + private static final AtomicBoolean ENABLED_STATE = new AtomicBoolean(true); + /** + * This is the maximum number of executable fields that can be in a good faith introspection query + */ + public static final int GOOD_FAITH_MAX_FIELDS_COUNT = 500; + /** + * This is the maximum depth a good faith introspection query can be + */ + public static final int GOOD_FAITH_MAX_DEPTH_COUNT = 20; + + /** + * @return true if good faith introspection is enabled + */ + public static boolean isEnabledJvmWide() { + return ENABLED_STATE.get(); + } + + /** + * This allows you to disable good faith introspection, which is on by default. 
+ * + * @param flag the desired state + * + * @return the previous state + */ + public static boolean enabledJvmWide(boolean flag) { + return ENABLED_STATE.getAndSet(flag); + } + + private static final Map ALLOWED_FIELD_INSTANCES = Map.of( + coordinates("Query", "__schema"), 1 + , coordinates("Query", "__type"), 1 + + , coordinates("__Type", "fields"), 1 + , coordinates("__Type", "inputFields"), 1 + , coordinates("__Type", "interfaces"), 1 + , coordinates("__Type", "possibleTypes"), 1 + ); + + public static Optional checkIntrospection(ExecutionContext executionContext) { + if (isIntrospectionEnabled(executionContext.getGraphQLContext())) { + ExecutableNormalizedOperation operation; + try { + operation = mkOperation(executionContext); + } catch (AbortExecutionException e) { + BadFaithIntrospectionError error = BadFaithIntrospectionError.tooBigOperation(e.getMessage()); + return Optional.of(ExecutionResult.newExecutionResult().addError(error).build()); + } + ImmutableListMultimap coordinatesToENFs = operation.getCoordinatesToNormalizedFields(); + for (Map.Entry entry : ALLOWED_FIELD_INSTANCES.entrySet()) { + FieldCoordinates coordinates = entry.getKey(); + Integer allowSize = entry.getValue(); + ImmutableList normalizedFields = coordinatesToENFs.get(coordinates); + if (normalizedFields.size() > allowSize) { + BadFaithIntrospectionError error = BadFaithIntrospectionError.tooManyFields(coordinates.toString()); + return Optional.of(ExecutionResult.newExecutionResult().addError(error).build()); + } + } + } + return Optional.empty(); + } + + /** + * This makes an executable operation limited in size then which suits a good faith introspection query. This helps guard + * against malicious queries. 
+ * + * @param executionContext the execution context + * + * @return an executable operation + */ + private static ExecutableNormalizedOperation mkOperation(ExecutionContext executionContext) throws AbortExecutionException { + Options options = Options.defaultOptions() + .maxFieldsCount(GOOD_FAITH_MAX_FIELDS_COUNT) + .maxChildrenDepth(GOOD_FAITH_MAX_DEPTH_COUNT) + .locale(executionContext.getLocale()) + .graphQLContext(executionContext.getGraphQLContext()); + + return createExecutableNormalizedOperation(executionContext.getGraphQLSchema(), + executionContext.getOperationDefinition(), + executionContext.getFragmentsByName(), + executionContext.getCoercedVariables(), + options); + + } + + private static boolean isIntrospectionEnabled(GraphQLContext graphQlContext) { + if (!isEnabledJvmWide()) { + return false; + } + return !graphQlContext.getOrDefault(GOOD_FAITH_INTROSPECTION_DISABLED, false); + } + + public static class BadFaithIntrospectionError implements GraphQLError { + private final String message; + + public static BadFaithIntrospectionError tooManyFields(String fieldCoordinate) { + return new BadFaithIntrospectionError(String.format("This request is not asking for introspection in good faith - %s is present too often!", fieldCoordinate)); + } + + public static BadFaithIntrospectionError tooBigOperation(String message) { + return new BadFaithIntrospectionError(String.format("This request is not asking for introspection in good faith - the query is too big: %s", message)); + } + + private BadFaithIntrospectionError(String message) { + this.message = message; + } + + @Override + public String getMessage() { + return message; + } + + @Override + public ErrorClassification getErrorType() { + return ErrorClassification.errorClassification("BadFaithIntrospection"); + } + + @Override + public List getLocations() { + return null; + } + + @Override + public String toString() { + return "BadFaithIntrospectionError{" + + "message='" + message + '\'' + + '}'; + } + } +} 
diff --git a/src/main/java/graphql/introspection/Introspection.java b/src/main/java/graphql/introspection/Introspection.java index d496e03702..ced6cbf818 100644 --- a/src/main/java/graphql/introspection/Introspection.java +++ b/src/main/java/graphql/introspection/Introspection.java @@ -3,9 +3,13 @@ import com.google.common.collect.ImmutableSet; import graphql.Assert; +import graphql.ExecutionResult; import graphql.GraphQLContext; import graphql.Internal; import graphql.PublicApi; +import graphql.execution.ExecutionContext; +import graphql.execution.MergedField; +import graphql.execution.MergedSelectionSet; import graphql.execution.ValuesResolver; import graphql.language.AstPrinter; import graphql.schema.FieldCoordinates; @@ -32,6 +36,7 @@ import graphql.schema.GraphQLSchema; import graphql.schema.GraphQLUnionType; import graphql.schema.InputValueWithState; +import org.jetbrains.annotations.NotNull; import java.util.ArrayList; import java.util.HashSet; @@ -39,7 +44,9 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import java.util.stream.Collectors; @@ -58,8 +65,88 @@ import static graphql.schema.GraphQLTypeUtil.unwrapAllAs; import static graphql.schema.GraphQLTypeUtil.unwrapOne; +/** + * GraphQl has a unique capability called Introspection that allow + * consumers to inspect the system and discover the fields and types available and makes the system self documented. + *

+ * Some security recommendations such as OWASP + * recommend that introspection be disabled in production. The {@link Introspection#enabledJvmWide(boolean)} method can be used to disable + * introspection for the whole JVM or you can place {@link Introspection#INTROSPECTION_DISABLED} into the {@link GraphQLContext} of a request + * to disable introspection for that request. + */ @PublicApi public class Introspection { + + + /** + * Placing a boolean value under this key in the per request {@link GraphQLContext} will enable + * or disable Introspection on that request. + */ + public static final String INTROSPECTION_DISABLED = "INTROSPECTION_DISABLED"; + private static final AtomicBoolean INTROSPECTION_ENABLED_STATE = new AtomicBoolean(true); + + /** + * This static method will enable / disable Introspection at a JVM wide level. + * + * @param enabled the flag indicating the desired enabled state + * + * @return the previous state of enablement + */ + public static boolean enabledJvmWide(boolean enabled) { + return INTROSPECTION_ENABLED_STATE.getAndSet(enabled); + } + + /** + * @return true if Introspection is enabled at a JVM wide level or false otherwise + */ + public static boolean isEnabledJvmWide() { + return INTROSPECTION_ENABLED_STATE.get(); + } + + /** + * This will look in to the field selection set and see if there are introspection fields, + * and if there is,it checks if introspection should run, and if not it will return an errored {@link ExecutionResult} + * that can be returned to the user. 
+ * + * @param mergedSelectionSet the fields to be executed + * @param executionContext the execution context in play + * + * @return an optional error result + */ + public static Optional isIntrospectionSensible(MergedSelectionSet mergedSelectionSet, ExecutionContext executionContext) { + GraphQLContext graphQLContext = executionContext.getGraphQLContext(); + + boolean isIntrospection = false; + for (String key : mergedSelectionSet.getKeys()) { + String fieldName = mergedSelectionSet.getSubField(key).getName(); + if (fieldName.equals(SchemaMetaFieldDef.getName()) + || fieldName.equals(TypeMetaFieldDef.getName())) { + if (!isIntrospectionEnabled(graphQLContext)) { + return mkDisabledError(mergedSelectionSet.getSubField(key)); + } + isIntrospection = true; + break; + } + } + if (isIntrospection) { + return GoodFaithIntrospection.checkIntrospection(executionContext); + } + return Optional.empty(); + } + + @NotNull + private static Optional mkDisabledError(MergedField schemaField) { + IntrospectionDisabledError error = new IntrospectionDisabledError(schemaField.getSingleField().getSourceLocation()); + return Optional.of(ExecutionResult.newExecutionResult().addError(error).build()); + } + + private static boolean isIntrospectionEnabled(GraphQLContext graphQlContext) { + if (!isEnabledJvmWide()) { + return false; + } + return !graphQlContext.getOrDefault(INTROSPECTION_DISABLED, false); + } + private static final Map> introspectionDataFetchers = new LinkedHashMap<>(); private static void register(GraphQLFieldsContainer parentType, String fieldName, IntrospectionDataFetcher introspectionDataFetcher) { @@ -636,6 +723,7 @@ public enum DirectiveLocation { return environment.getGraphQLSchema().getType(name); }; + // __typename is always available public static final IntrospectionDataFetcher TypeNameMetaFieldDefDataFetcher = environment -> simplePrint(environment.getParentType()); @Internal @@ -690,9 +778,14 @@ public static boolean isIntrospectionTypes(GraphQLNamedType type) 
{ return introspectionTypes.contains(type.getName()); } + public static boolean isIntrospectionTypes(String typeName) { + return introspectionTypes.contains(typeName); + } + /** * This will look up a field definition by name, and understand that fields like __typename and __schema are special - * and take precedence in field resolution + * and take precedence in field resolution. If the parent type is a union type, then the only field allowed + * is `__typename`. * * @param schema the schema to use * @param parentType the type of the parent object @@ -702,6 +795,43 @@ public static boolean isIntrospectionTypes(GraphQLNamedType type) { */ public static GraphQLFieldDefinition getFieldDef(GraphQLSchema schema, GraphQLCompositeType parentType, String fieldName) { + GraphQLFieldDefinition fieldDefinition = getSystemFieldDef(schema, parentType, fieldName); + if (fieldDefinition != null) { + return fieldDefinition; + } + + assertTrue(parentType instanceof GraphQLFieldsContainer, "should not happen : parent type must be an object or interface %s", parentType); + GraphQLFieldsContainer fieldsContainer = (GraphQLFieldsContainer) parentType; + fieldDefinition = schema.getCodeRegistry().getFieldVisibility().getFieldDefinition(fieldsContainer, fieldName); + assertTrue(fieldDefinition != null, "Unknown field '%s' for type %s", fieldName, fieldsContainer.getName()); + return fieldDefinition; + } + + /** + * This will look up a field definition by name, and understand that fields like __typename and __schema are special + * and take precedence in field resolution + * + * @param schema the schema to use + * @param parentType the type of the parent {@link GraphQLFieldsContainer} + * @param fieldName the field to look up + * + * @return a field definition otherwise throws an assertion exception if it's null + */ + public static GraphQLFieldDefinition getFieldDefinition(GraphQLSchema schema, GraphQLFieldsContainer parentType, String fieldName) { + // this method is optimized to look 
up the most common case first (type for field) and hence suits the hot path of the execution engine + // and as a small benefit does not allocate any assertions unless it completely failed + GraphQLFieldDefinition fieldDefinition = schema.getCodeRegistry().getFieldVisibility().getFieldDefinition(parentType, fieldName); + if (fieldDefinition == null) { + // we look up system fields second because they are less likely to be the field in question + fieldDefinition = getSystemFieldDef(schema, parentType, fieldName); + if (fieldDefinition == null) { + Assert.assertShouldNeverHappen(String.format("Unknown field '%s' for type %s", fieldName, parentType.getName())); + } + } + return fieldDefinition; + } + + private static GraphQLFieldDefinition getSystemFieldDef(GraphQLSchema schema, GraphQLCompositeType parentType, String fieldName) { if (schema.getQueryType() == parentType) { if (fieldName.equals(schema.getIntrospectionSchemaFieldDefinition().getName())) { return schema.getIntrospectionSchemaFieldDefinition(); @@ -713,11 +843,6 @@ public static GraphQLFieldDefinition getFieldDef(GraphQLSchema schema, GraphQLCo if (fieldName.equals(schema.getIntrospectionTypenameFieldDefinition().getName())) { return schema.getIntrospectionTypenameFieldDefinition(); } - - assertTrue(parentType instanceof GraphQLFieldsContainer, () -> String.format("should not happen : parent type must be an object or interface %s", parentType)); - GraphQLFieldsContainer fieldsContainer = (GraphQLFieldsContainer) parentType; - GraphQLFieldDefinition fieldDefinition = schema.getCodeRegistry().getFieldVisibility().getFieldDefinition(fieldsContainer, fieldName); - assertTrue(fieldDefinition != null, () -> String.format("Unknown field '%s' for type %s", fieldName, fieldsContainer.getName())); - return fieldDefinition; + return null; } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/introspection/IntrospectionDisabledError.java 
b/src/main/java/graphql/introspection/IntrospectionDisabledError.java new file mode 100644 index 0000000000..cbd7e077a3 --- /dev/null +++ b/src/main/java/graphql/introspection/IntrospectionDisabledError.java @@ -0,0 +1,35 @@ +package graphql.introspection; + +import graphql.ErrorClassification; +import graphql.ErrorType; +import graphql.GraphQLError; +import graphql.Internal; +import graphql.language.SourceLocation; + +import java.util.Collections; +import java.util.List; + +@Internal +public class IntrospectionDisabledError implements GraphQLError { + + private final List locations; + + public IntrospectionDisabledError(SourceLocation sourceLocation) { + locations = sourceLocation == null ? Collections.emptyList() : Collections.singletonList(sourceLocation); + } + + @Override + public String getMessage() { + return "Introspection has been disabled for this request"; + } + + @Override + public List getLocations() { + return locations; + } + + @Override + public ErrorClassification getErrorType() { + return ErrorClassification.errorClassification("IntrospectionDisabled"); + } +} diff --git a/src/main/java/graphql/language/AstPrinter.java b/src/main/java/graphql/language/AstPrinter.java index 218dabd59e..b48fa2075d 100644 --- a/src/main/java/graphql/language/AstPrinter.java +++ b/src/main/java/graphql/language/AstPrinter.java @@ -1,6 +1,5 @@ package graphql.language; -import graphql.AssertException; import graphql.PublicApi; import graphql.collect.ImmutableKit; @@ -10,7 +9,9 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.StringJoiner; +import static graphql.Assert.assertShouldNeverHappen; import static graphql.Assert.assertTrue; import static graphql.util.EscapeUtil.escapeJsonString; import static java.lang.String.valueOf; @@ -481,7 +482,7 @@ NodePrinter _findPrinter(Node node, Class startClass) { } clazz = clazz.getSuperclass(); } - throw new AssertException(String.format("We have a missing printer implementation 
for %s : report a bug!", clazz)); + return assertShouldNeverHappen("We have a missing printer implementation for %s : report a bug!", clazz); } private boolean isEmpty(List list) { @@ -489,7 +490,7 @@ private boolean isEmpty(List list) { } private boolean isEmpty(String s) { - return s == null || s.trim().length() == 0; + return s == null || s.isBlank(); } private List nvl(List list) { @@ -525,7 +526,7 @@ private String value(Value value) { } private String description(Node node) { - Description description = ((AbstractDescribedNode) node).getDescription(); + Description description = ((AbstractDescribedNode) node).getDescription(); if (description == null || description.getContent() == null || compactMode) { return ""; } @@ -534,13 +535,13 @@ private String description(Node node) { if (description.isMultiLine()) { s = "\"\"\"" + (startNewLine ? "" : "\n") + description.getContent() + "\n\"\"\"\n"; } else { - s = "\"" + description.getContent() + "\"\n"; + s = "\"" + escapeJsonString(description.getContent()) + "\"\n"; } return s; } private String directives(List directives) { - return join(nvl(directives), compactMode? "" : " "); + return join(nvl(directives), compactMode ? "" : " "); } private String join(List nodes, String delim) { @@ -563,7 +564,7 @@ private String joinTight(List nodes, String delim, String pr first = false; } else { boolean canButtTogether = lastNodeText.endsWith("}"); - if (! 
canButtTogether) { + if (!canButtTogether) { joined.append(delim); } } @@ -577,21 +578,13 @@ private String joinTight(List nodes, String delim, String pr } private String join(List nodes, String delim, String prefix, String suffix) { - StringBuilder joined = new StringBuilder(); - joined.append(prefix); + StringJoiner joiner = new StringJoiner(delim, prefix, suffix); - boolean first = true; for (T node : nodes) { - if (first) { - first = false; - } else { - joined.append(delim); - } - joined.append(this.node(node)); + joiner.add(node(node)); } - joined.append(suffix); - return joined.toString(); + return joiner.toString(); } private String spaced(String... args) { @@ -603,22 +596,15 @@ private String smooshed(String... args) { } private String join(String delim, String... args) { - StringBuilder builder = new StringBuilder(); + StringJoiner joiner = new StringJoiner(delim); - boolean first = true; for (final String arg : args) { - if (isEmpty(arg)) { - continue; - } - if (first) { - first = false; - } else { - builder.append(delim); + if (!isEmpty(arg)) { + joiner.add(arg); } - builder.append(arg); } - return builder.toString(); + return joiner.toString(); } String wrap(String start, String maybeString, String end) { @@ -628,7 +614,7 @@ String wrap(String start, String maybeString, String end) { } return ""; } - return new StringBuilder().append(start).append(maybeString).append(!isEmpty(end) ? end : "").toString(); + return start + maybeString + (!isEmpty(end) ? 
end : ""); } private String block(List nodes) { @@ -637,7 +623,7 @@ private String block(List nodes) { } if (compactMode) { String joinedNodes = joinTight(nodes, " ", "", ""); - return new StringBuilder().append("{").append(joinedNodes).append("}").toString(); + return "{" + joinedNodes + "}"; } return indent(new StringBuilder().append("{\n").append(join(nodes, "\n"))) + "\n}"; @@ -659,7 +645,7 @@ String wrap(String start, Node maybeNode, String end) { if (maybeNode == null) { return ""; } - return new StringBuilder().append(start).append(node(maybeNode)).append(isEmpty(end) ? "" : end).toString(); + return start + node(maybeNode) + (isEmpty(end) ? "" : end); } /** @@ -718,7 +704,8 @@ interface NodePrinter { /** * Allow subclasses to replace a printer for a specific {@link Node} - * @param nodeClass the class of the {@link Node} + * + * @param nodeClass the class of the {@link Node} * @param nodePrinter the custom {@link NodePrinter} */ void replacePrinter(Class nodeClass, NodePrinter nodePrinter) { diff --git a/src/main/java/graphql/language/Field.java b/src/main/java/graphql/language/Field.java index b034ca6bad..b4b0bcd97a 100644 --- a/src/main/java/graphql/language/Field.java +++ b/src/main/java/graphql/language/Field.java @@ -6,6 +6,7 @@ import graphql.Internal; import graphql.PublicApi; import graphql.collect.ImmutableKit; +import graphql.util.Interning; import graphql.util.TraversalControl; import graphql.util.TraverserContext; @@ -50,7 +51,7 @@ protected Field(String name, IgnoredChars ignoredChars, Map additionalData) { super(sourceLocation, comments, ignoredChars, additionalData); - this.name = name; + this.name = name == null ? 
null : Interning.intern(name); this.alias = alias; this.arguments = ImmutableList.copyOf(arguments); this.directives = ImmutableList.copyOf(directives); diff --git a/src/main/java/graphql/language/PrettyAstPrinter.java b/src/main/java/graphql/language/PrettyAstPrinter.java index 02a06bf004..c763a7b93d 100644 --- a/src/main/java/graphql/language/PrettyAstPrinter.java +++ b/src/main/java/graphql/language/PrettyAstPrinter.java @@ -10,6 +10,7 @@ import java.util.List; import java.util.Objects; import java.util.Optional; +import java.util.StringJoiner; import java.util.function.Function; import java.util.stream.Collectors; @@ -242,7 +243,7 @@ private boolean isEmpty(List list) { } private boolean isEmpty(String s) { - return s == null || s.trim().length() == 0; + return s == null || s.isBlank(); } private List nvl(List list) { @@ -258,7 +259,7 @@ private String outset(Node node) { } private String description(Node node) { - Description description = ((AbstractDescribedNode) node).getDescription(); + Description description = ((AbstractDescribedNode) node).getDescription(); if (description == null || description.getContent() == null) { return ""; } @@ -304,21 +305,13 @@ private String join(List nodes, String delim) { } private String join(List nodes, String delim, String prefix, String suffix) { - StringBuilder joined = new StringBuilder(); + StringJoiner joiner = new StringJoiner(delim, prefix, suffix); - joined.append(prefix); - boolean first = true; for (T node : nodes) { - if (first) { - first = false; - } else { - joined.append(delim); - } - joined.append(node(node)); + joiner.add(node(node)); } - joined.append(suffix); - return joined.toString(); + return joiner.toString(); } private String node(Node node) { @@ -338,22 +331,15 @@ private Function append(String suffix) { } private String join(String delim, String... 
args) { - StringBuilder builder = new StringBuilder(); + StringJoiner joiner = new StringJoiner(delim); - boolean first = true; for (final String arg : args) { - if (isEmpty(arg)) { - continue; + if (!isEmpty(arg)) { + joiner.add(arg); } - if (first) { - first = false; - } else { - builder.append(delim); - } - builder.append(arg); } - return builder.toString(); + return joiner.toString(); } private String block(List nodes, Node parentNode, String prefix, String suffix, String separatorMultiline, String separatorSingleLine, String whenEmpty) { @@ -381,8 +367,8 @@ private String block(List nodes, Node parentNode, String pre String blockStart = commentParser.getBeginningOfBlockComment(parentNode, prefix) .map(this::comment) - .map(commentText -> String.format("%s %s\n", prefix, commentText)) - .orElse(String.format("%s%s", prefix, (isMultiline ? "\n" : ""))); + .map(commentText -> prefix + " " + commentText + "\n") + .orElseGet(() -> prefix + (isMultiline ? "\n" : "")); String blockEndComments = comments(commentParser.getEndOfBlockComments(parentNode, suffix), "\n", ""); String blockEnd = (isMultiline ? 
"\n" : "") + suffix; @@ -422,10 +408,10 @@ public static class PrettyPrinterOptions { private static final PrettyPrinterOptions defaultOptions = new PrettyPrinterOptions(IndentType.SPACE, 2); private PrettyPrinterOptions(IndentType indentType, int indentWidth) { - this.indentText = String.join("", Collections.nCopies(indentWidth, indentType.character)); + this.indentText = String.join("", Collections.nCopies(indentWidth, indentType.character)); } - public static PrettyPrinterOptions defaultOptions() { + public static PrettyPrinterOptions defaultOptions() { return defaultOptions; } diff --git a/src/main/java/graphql/normalized/ENFMerger.java b/src/main/java/graphql/normalized/ENFMerger.java index 97d182a5f4..5150eee5a4 100644 --- a/src/main/java/graphql/normalized/ENFMerger.java +++ b/src/main/java/graphql/normalized/ENFMerger.java @@ -19,7 +19,12 @@ @Internal public class ENFMerger { - public static void merge(ExecutableNormalizedField parent, List childrenWithSameResultKey, GraphQLSchema schema) { + public static void merge( + ExecutableNormalizedField parent, + List childrenWithSameResultKey, + GraphQLSchema schema, + boolean deferSupport + ) { // they have all the same result key // we can only merge the fields if they have the same field name + arguments + all children are the same List> possibleGroupsToMerge = new ArrayList<>(); @@ -28,7 +33,7 @@ public static void merge(ExecutableNormalizedField parent, List group : possibleGroupsToMerge) { for (ExecutableNormalizedField fieldInGroup : group) { - if(field.getFieldName().equals(Introspection.TypeNameMetaFieldDef.getName())) { + if (field.getFieldName().equals(Introspection.TypeNameMetaFieldDef.getName())) { addToGroup = true; group.add(field); continue overPossibleGroups; @@ -63,8 +68,15 @@ && isFieldInSharedInterface(field, fieldInGroup, schema) // patching the first one to contain more objects, remove all others Iterator iterator = groupOfFields.iterator(); ExecutableNormalizedField first = iterator.next(); 
+ while (iterator.hasNext()) { - parent.getChildren().remove(iterator.next()); + ExecutableNormalizedField next = iterator.next(); + parent.getChildren().remove(next); + + if (deferSupport) { + // Move defer executions from removed field into the merged field's entry + first.addDeferredExecutions(next.getDeferredExecutions()); + } } first.setObjectTypeNames(mergedObjects); } diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedField.java b/src/main/java/graphql/normalized/ExecutableNormalizedField.java index 41ddd594b3..f9db04b00a 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedField.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedField.java @@ -2,13 +2,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import graphql.Assert; +import graphql.ExperimentalApi; import graphql.Internal; import graphql.Mutable; import graphql.PublicApi; import graphql.collect.ImmutableKit; import graphql.introspection.Introspection; import graphql.language.Argument; +import graphql.normalized.incremental.NormalizedDeferredExecution; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLInterfaceType; import graphql.schema.GraphQLNamedOutputType; @@ -63,6 +64,8 @@ public class ExecutableNormalizedField { private final String fieldName; private final int level; + // Mutable List on purpose: it is modified after creation + private final LinkedHashSet deferredExecutions; private ExecutableNormalizedField(Builder builder) { this.alias = builder.alias; @@ -74,6 +77,7 @@ private ExecutableNormalizedField(Builder builder) { this.children = builder.children; this.level = builder.level; this.parent = builder.parent; + this.deferredExecutions = builder.deferredExecutions; } /** @@ -129,6 +133,7 @@ private ExecutableNormalizedField(Builder builder) { * NOT {@code Cat} or {@code Dog} as their respective implementations would say. 
* * @param schema - the graphql schema in play + * * @return true if the field is conditional */ public boolean isConditional(@NotNull GraphQLSchema schema) { @@ -178,7 +183,7 @@ public boolean hasChildren() { public GraphQLOutputType getType(GraphQLSchema schema) { List fieldDefinitions = getFieldDefinitions(schema); Set fieldTypes = fieldDefinitions.stream().map(fd -> simplePrint(fd.getType())).collect(toSet()); - Assert.assertTrue(fieldTypes.size() == 1, () -> "More than one type ... use getTypes"); + assertTrue(fieldTypes.size() == 1, () -> "More than one type ... use getTypes"); return fieldDefinitions.get(0).getType(); } @@ -195,7 +200,7 @@ public void forEachFieldDefinition(GraphQLSchema schema, Consumer String.format("No field %s found for type %s", fieldName, objectTypeName))); + consumer.accept(assertNotNull(type.getField(fieldName), "No field %s found for type %s", fieldName, objectTypeName)); } } @@ -218,7 +223,7 @@ private GraphQLFieldDefinition getOneFieldDefinition(GraphQLSchema schema) { String objectTypeName = objectTypeNames.iterator().next(); GraphQLObjectType type = (GraphQLObjectType) assertNotNull(schema.getType(objectTypeName)); - return assertNotNull(type.getField(fieldName), () -> String.format("No field %s found for type %s", fieldName, objectTypeName)); + return assertNotNull(type.getField(fieldName), "No field %s found for type %s", fieldName, objectTypeName); } private static GraphQLFieldDefinition resolveIntrospectionField(GraphQLSchema schema, Set objectTypeNames, String fieldName) { @@ -255,6 +260,16 @@ public void clearChildren() { this.children.clear(); } + @Internal + public void setDeferredExecutions(Collection deferredExecutions) { + this.deferredExecutions.clear(); + this.deferredExecutions.addAll(deferredExecutions); + } + + public void addDeferredExecutions(Collection deferredExecutions) { + this.deferredExecutions.addAll(deferredExecutions); + } + /** * All merged fields have the same name so this is the name of the {@link 
ExecutableNormalizedField}. *

@@ -364,7 +379,6 @@ public String getSingleObjectTypeName() { return objectTypeNames.iterator().next(); } - /** * @return a helper method show field details */ @@ -461,6 +475,16 @@ public ExecutableNormalizedField getParent() { return parent; } + /** + * @return the {@link NormalizedDeferredExecution}s associated with this {@link ExecutableNormalizedField}. + * + * @see NormalizedDeferredExecution + */ + @ExperimentalApi + public LinkedHashSet getDeferredExecutions() { + return deferredExecutions; + } + @Internal public void replaceParent(ExecutableNormalizedField newParent) { this.parent = newParent; @@ -588,6 +612,8 @@ public static class Builder { private LinkedHashMap resolvedArguments = new LinkedHashMap<>(); private ImmutableList astArguments = ImmutableKit.emptyList(); + private LinkedHashSet deferredExecutions = new LinkedHashSet<>(); + private Builder() { } @@ -601,6 +627,7 @@ private Builder(ExecutableNormalizedField existing) { this.children = new ArrayList<>(existing.children); this.level = existing.getLevel(); this.parent = existing.getParent(); + this.deferredExecutions = existing.getDeferredExecutions(); } public Builder clearObjectTypesNames() { @@ -656,6 +683,11 @@ public Builder parent(ExecutableNormalizedField parent) { return this; } + public Builder deferredExecutions(LinkedHashSet deferredExecutions) { + this.deferredExecutions = deferredExecutions; + return this; + } + public ExecutableNormalizedField build() { return new ExecutableNormalizedField(this); } diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java index ce50c9931b..cfcda2746d 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java @@ -31,6 +31,8 @@ public class ExecutableNormalizedOperation { private final Map normalizedFieldToMergedField; private final Map 
normalizedFieldToQueryDirectives; private final ImmutableListMultimap coordinatesToNormalizedFields; + private final int operationFieldCount; + private final int operationDepth; public ExecutableNormalizedOperation( OperationDefinition.Operation operation, @@ -39,8 +41,9 @@ public ExecutableNormalizedOperation( ImmutableListMultimap fieldToNormalizedField, Map normalizedFieldToMergedField, Map normalizedFieldToQueryDirectives, - ImmutableListMultimap coordinatesToNormalizedFields - ) { + ImmutableListMultimap coordinatesToNormalizedFields, + int operationFieldCount, + int operationDepth) { this.operation = operation; this.operationName = operationName; this.topLevelFields = topLevelFields; @@ -48,6 +51,8 @@ public ExecutableNormalizedOperation( this.normalizedFieldToMergedField = normalizedFieldToMergedField; this.normalizedFieldToQueryDirectives = normalizedFieldToQueryDirectives; this.coordinatesToNormalizedFields = coordinatesToNormalizedFields; + this.operationFieldCount = operationFieldCount; + this.operationDepth = operationDepth; } /** @@ -64,6 +69,20 @@ public String getOperationName() { return operationName; } + /** + * @return This returns how many {@link ExecutableNormalizedField}s are in the operation. 
+ */ + public int getOperationFieldCount() { + return operationFieldCount; + } + + /** + * @return This returns the depth of the operation + */ + public int getOperationDepth() { + return operationDepth; + } + /** * This multimap shows how a given {@link ExecutableNormalizedField} maps to a one or more field coordinate in the schema * diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java index 1124c9b7ce..085fad0afb 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java @@ -4,6 +4,8 @@ import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import graphql.Assert; +import graphql.ExperimentalApi; import graphql.GraphQLContext; import graphql.PublicApi; import graphql.collect.ImmutableKit; @@ -15,7 +17,9 @@ import graphql.execution.conditional.ConditionalNodes; import graphql.execution.directives.QueryDirectives; import graphql.execution.directives.QueryDirectivesImpl; +import graphql.execution.incremental.IncrementalUtils; import graphql.introspection.Introspection; +import graphql.language.Directive; import graphql.language.Document; import graphql.language.Field; import graphql.language.FragmentDefinition; @@ -26,6 +30,7 @@ import graphql.language.Selection; import graphql.language.SelectionSet; import graphql.language.VariableDefinition; +import graphql.normalized.incremental.NormalizedDeferredExecution; import graphql.schema.FieldCoordinates; import graphql.schema.GraphQLCompositeType; import graphql.schema.GraphQLFieldDefinition; @@ -34,7 +39,6 @@ import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLSchema; import graphql.schema.GraphQLType; -import graphql.schema.GraphQLTypeUtil; import graphql.schema.GraphQLUnionType; import 
graphql.schema.GraphQLUnmodifiedType; import graphql.schema.impl.SchemaUtil; @@ -43,11 +47,15 @@ import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Set; -import java.util.function.BiConsumer; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; import static graphql.Assert.assertNotNull; import static graphql.Assert.assertShouldNeverHappen; @@ -58,6 +66,8 @@ import static graphql.util.FpKit.intersection; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; +import static java.util.stream.Collectors.toCollection; +import static java.util.stream.Collectors.toSet; /** * This factory can create a {@link ExecutableNormalizedOperation} which represents what would be executed @@ -65,24 +75,58 @@ */ @PublicApi public class ExecutableNormalizedOperationFactory { + public static class Options { + + private final GraphQLContext graphQLContext; private final Locale locale; private final int maxChildrenDepth; + private final int maxFieldsCount; + + private final boolean deferSupport; + + /** + * The default max fields count is 100,000. 
+ * This is big enough for even very large queries, but + * can be changed via {#setDefaultOptions + */ + public static final int DEFAULT_MAX_FIELDS_COUNT = 100_000; + private static Options defaultOptions = new Options(GraphQLContext.getDefault(), + Locale.getDefault(), + Integer.MAX_VALUE, + DEFAULT_MAX_FIELDS_COUNT, + false); private Options(GraphQLContext graphQLContext, Locale locale, - int maxChildrenDepth) { + int maxChildrenDepth, + int maxFieldsCount, + boolean deferSupport) { this.graphQLContext = graphQLContext; this.locale = locale; this.maxChildrenDepth = maxChildrenDepth; + this.deferSupport = deferSupport; + this.maxFieldsCount = maxFieldsCount; } + /** + * Sets new default Options used when creating instances of {@link ExecutableNormalizedOperation}. + * + * @param options new default options + */ + public static void setDefaultOptions(Options options) { + defaultOptions = Assert.assertNotNull(options); + } + + + /** + * Returns the default options used when creating instances of {@link ExecutableNormalizedOperation}. + * + * @return the default options + */ public static Options defaultOptions() { - return new Options( - GraphQLContext.getDefault(), - Locale.getDefault(), - Integer.MAX_VALUE); + return defaultOptions; } /** @@ -91,10 +135,11 @@ public static Options defaultOptions() { * e.g. can be passed to {@link graphql.schema.Coercing} for parsing. * * @param locale the locale to use + * * @return new options object to use */ public Options locale(Locale locale) { - return new Options(this.graphQLContext, locale, this.maxChildrenDepth); + return new Options(this.graphQLContext, locale, this.maxChildrenDepth, this.maxFieldsCount, this.deferSupport); } /** @@ -103,10 +148,11 @@ public Options locale(Locale locale) { * Can be used to intercept input values e.g. using {@link graphql.execution.values.InputInterceptor}. 
* * @param graphQLContext the context to use + * * @return new options object to use */ public Options graphQLContext(GraphQLContext graphQLContext) { - return new Options(graphQLContext, this.locale, this.maxChildrenDepth); + return new Options(graphQLContext, this.locale, this.maxChildrenDepth, this.maxFieldsCount, this.deferSupport); } /** @@ -114,14 +160,40 @@ public Options graphQLContext(GraphQLContext graphQLContext) { * against malicious operations. * * @param maxChildrenDepth the max depth + * * @return new options object to use */ public Options maxChildrenDepth(int maxChildrenDepth) { - return new Options(this.graphQLContext, this.locale, maxChildrenDepth); + return new Options(this.graphQLContext, this.locale, maxChildrenDepth, this.maxFieldsCount, this.deferSupport); + } + + /** + * Controls the maximum number of ENFs created. Can be used to prevent + * against malicious operations. + * + * @param maxFieldsCount the max number of ENFs created + * + * @return new options object to use + */ + public Options maxFieldsCount(int maxFieldsCount) { + return new Options(this.graphQLContext, this.locale, this.maxChildrenDepth, maxFieldsCount, this.deferSupport); + } + + /** + * Controls whether defer execution is supported when creating instances of {@link ExecutableNormalizedOperation}. 
+ * + * @param deferSupport true to enable support for defer + * + * @return new options object to use + */ + @ExperimentalApi + public Options deferSupport(boolean deferSupport) { + return new Options(this.graphQLContext, this.locale, this.maxChildrenDepth, this.maxFieldsCount, deferSupport); } /** * @return context to use during operation parsing + * * @see #graphQLContext(GraphQLContext) */ public GraphQLContext getGraphQLContext() { @@ -130,6 +202,7 @@ public GraphQLContext getGraphQLContext() { /** * @return locale to use during operation parsing + * * @see #locale(Locale) */ public Locale getLocale() { @@ -138,14 +211,33 @@ public Locale getLocale() { /** * @return maximum children depth before aborting parsing + * * @see #maxChildrenDepth(int) */ public int getMaxChildrenDepth() { return maxChildrenDepth; } + + public int getMaxFieldsCount() { + return maxFieldsCount; + } + + /** + * @return whether support for defer is enabled + * + * @see #deferSupport(boolean) + */ + @ExperimentalApi + public boolean getDeferSupport() { + return deferSupport; + } } - private final ConditionalNodes conditionalNodes = new ConditionalNodes(); + private static final ConditionalNodes conditionalNodes = new ConditionalNodes(); + + private ExecutableNormalizedOperationFactory() { + + } /** * This will create a runtime representation of the graphql operation that would be executed @@ -163,14 +255,44 @@ public static ExecutableNormalizedOperation createExecutableNormalizedOperation( Document document, String operationName, CoercedVariables coercedVariableValues + ) { + return createExecutableNormalizedOperation( + graphQLSchema, + document, + operationName, + coercedVariableValues, + Options.defaultOptions()); + } + + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. 
+ * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param coercedVariableValues the coerced variables to use + * @param options the {@link Options} to use for parsing + * + * @return a runtime representation of the graphql operation. + */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperation( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + CoercedVariables coercedVariableValues, + Options options ) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); - return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, + + return new ExecutableNormalizedOperationFactoryImpl( + graphQLSchema, getOperationResult.operationDefinition, getOperationResult.fragmentsByName, coercedVariableValues, null, - Options.defaultOptions()); + options + ).createNormalizedQueryImpl(); } /** @@ -188,14 +310,40 @@ public static ExecutableNormalizedOperation createExecutableNormalizedOperation( OperationDefinition operationDefinition, Map fragments, CoercedVariables coercedVariableValues) { - return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, + return createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragments, coercedVariableValues, - null, Options.defaultOptions()); } + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param operationDefinition the operation to be executed + * @param fragments a set of fragments associated with the operation + * @param coercedVariableValues the coerced variables to use + * @param options the options to use + * + * @return a runtime representation of the graphql operation. 
+ */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, + OperationDefinition operationDefinition, + Map fragments, + CoercedVariables coercedVariableValues, + Options options) { + return new ExecutableNormalizedOperationFactoryImpl( + graphQLSchema, + operationDefinition, + fragments, + coercedVariableValues, + null, + options + ).createNormalizedQueryImpl(); + } + /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. @@ -267,20 +415,8 @@ public static ExecutableNormalizedOperation createExecutableNormalizedOperationW RawVariables rawVariables, Options options) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); + OperationDefinition operationDefinition = getOperationResult.operationDefinition; - return new ExecutableNormalizedOperationFactory().createExecutableNormalizedOperationImplWithRawVariables(graphQLSchema, - getOperationResult.operationDefinition, - getOperationResult.fragmentsByName, - rawVariables, - options - ); - } - - private ExecutableNormalizedOperation createExecutableNormalizedOperationImplWithRawVariables(GraphQLSchema graphQLSchema, - OperationDefinition operationDefinition, - Map fragments, - RawVariables rawVariables, - Options options) { List variableDefinitions = operationDefinition.getVariableDefinitions(); CoercedVariables coercedVariableValues = ValuesResolver.coerceVariableValues(graphQLSchema, variableDefinitions, @@ -292,437 +428,551 @@ private ExecutableNormalizedOperation createExecutableNormalizedOperationImplWit rawVariables, options.getGraphQLContext(), options.getLocale()); - return createNormalizedQueryImpl(graphQLSchema, + + return new ExecutableNormalizedOperationFactoryImpl( + graphQLSchema, operationDefinition, - fragments, + getOperationResult.fragmentsByName, coercedVariableValues, normalizedVariableValues, - options); + options + 
).createNormalizedQueryImpl(); } - /** - * Creates a new ExecutableNormalizedOperation for the provided query - */ - private ExecutableNormalizedOperation createNormalizedQueryImpl(GraphQLSchema graphQLSchema, - OperationDefinition operationDefinition, - Map fragments, - CoercedVariables coercedVariableValues, - @Nullable Map normalizedVariableValues, - Options options) { - FieldCollectorNormalizedQueryParams parameters = FieldCollectorNormalizedQueryParams - .newParameters() - .fragments(fragments) - .schema(graphQLSchema) - .coercedVariables(coercedVariableValues.toMap()) - .normalizedVariables(normalizedVariableValues) - .build(); - - GraphQLObjectType rootType = SchemaUtil.getOperationRootType(graphQLSchema, operationDefinition); - - CollectNFResult collectFromOperationResult = collectFromOperation(parameters, operationDefinition, rootType); - - ImmutableListMultimap.Builder fieldToNormalizedField = ImmutableListMultimap.builder(); - ImmutableMap.Builder normalizedFieldToMergedField = ImmutableMap.builder(); - ImmutableMap.Builder normalizedFieldToQueryDirectives = ImmutableMap.builder(); - ImmutableListMultimap.Builder coordinatesToNormalizedFields = ImmutableListMultimap.builder(); - - BiConsumer captureMergedField = (enf, mergedFld) -> { + + private static class ExecutableNormalizedOperationFactoryImpl { + private final GraphQLSchema graphQLSchema; + private final OperationDefinition operationDefinition; + private final Map fragments; + private final CoercedVariables coercedVariableValues; + private final @Nullable Map normalizedVariableValues; + private final Options options; + + private final List possibleMergerList = new ArrayList<>(); + + private final ImmutableListMultimap.Builder fieldToNormalizedField = ImmutableListMultimap.builder(); + private final ImmutableMap.Builder normalizedFieldToMergedField = ImmutableMap.builder(); + private final ImmutableMap.Builder normalizedFieldToQueryDirectives = ImmutableMap.builder(); + private final 
ImmutableListMultimap.Builder coordinatesToNormalizedFields = ImmutableListMultimap.builder(); + private int fieldCount = 0; + private int maxDepthSeen = 0; + + private ExecutableNormalizedOperationFactoryImpl( + GraphQLSchema graphQLSchema, + OperationDefinition operationDefinition, + Map fragments, + CoercedVariables coercedVariableValues, + @Nullable Map normalizedVariableValues, + Options options + ) { + this.graphQLSchema = graphQLSchema; + this.operationDefinition = operationDefinition; + this.fragments = fragments; + this.coercedVariableValues = coercedVariableValues; + this.normalizedVariableValues = normalizedVariableValues; + this.options = options; + } + + /** + * Creates a new ExecutableNormalizedOperation for the provided query + */ + private ExecutableNormalizedOperation createNormalizedQueryImpl() { + GraphQLObjectType rootType = SchemaUtil.getOperationRootType(graphQLSchema, operationDefinition); + + CollectNFResult collectFromOperationResult = collectFromOperation(rootType); + + for (ExecutableNormalizedField topLevel : collectFromOperationResult.children) { + ImmutableList fieldAndAstParents = collectFromOperationResult.normalizedFieldToAstFields.get(topLevel); + MergedField mergedField = newMergedField(fieldAndAstParents); + + captureMergedField(topLevel, mergedField); + + updateFieldToNFMap(topLevel, fieldAndAstParents); + updateCoordinatedToNFMap(topLevel); + + int depthSeen = buildFieldWithChildren( + topLevel, + fieldAndAstParents, + 1); + maxDepthSeen = Math.max(maxDepthSeen, depthSeen); + } + // getPossibleMergerList + for (PossibleMerger possibleMerger : possibleMergerList) { + List childrenWithSameResultKey = possibleMerger.parent.getChildrenWithSameResultKey(possibleMerger.resultKey); + ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema, options.deferSupport); + } + return new ExecutableNormalizedOperation( + operationDefinition.getOperation(), + operationDefinition.getName(), + new 
ArrayList<>(collectFromOperationResult.children), + fieldToNormalizedField.build(), + normalizedFieldToMergedField.build(), + normalizedFieldToQueryDirectives.build(), + coordinatesToNormalizedFields.build(), + fieldCount, + maxDepthSeen + ); + } + + private void captureMergedField(ExecutableNormalizedField enf, MergedField mergedFld) { // QueryDirectivesImpl is a lazy object and only computes itself when asked for QueryDirectives queryDirectives = new QueryDirectivesImpl(mergedFld, graphQLSchema, coercedVariableValues.toMap(), options.getGraphQLContext(), options.getLocale()); normalizedFieldToQueryDirectives.put(enf, queryDirectives); normalizedFieldToMergedField.put(enf, mergedFld); - }; - - for (ExecutableNormalizedField topLevel : collectFromOperationResult.children) { - ImmutableList fieldAndAstParents = collectFromOperationResult.normalizedFieldToAstFields.get(topLevel); - MergedField mergedField = newMergedField(fieldAndAstParents); - - captureMergedField.accept(topLevel, mergedField); - - updateFieldToNFMap(topLevel, fieldAndAstParents, fieldToNormalizedField); - updateCoordinatedToNFMap(coordinatesToNormalizedFields, topLevel); - - buildFieldWithChildren( - topLevel, - fieldAndAstParents, - parameters, - fieldToNormalizedField, - captureMergedField, - coordinatesToNormalizedFields, - 1, - options.getMaxChildrenDepth()); - } - for (FieldCollectorNormalizedQueryParams.PossibleMerger possibleMerger : parameters.getPossibleMergerList()) { - List childrenWithSameResultKey = possibleMerger.parent.getChildrenWithSameResultKey(possibleMerger.resultKey); - ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema); - } - return new ExecutableNormalizedOperation( - operationDefinition.getOperation(), - operationDefinition.getName(), - new ArrayList<>(collectFromOperationResult.children), - fieldToNormalizedField.build(), - normalizedFieldToMergedField.build(), - normalizedFieldToQueryDirectives.build(), - coordinatesToNormalizedFields.build() 
- ); - } + } + private int buildFieldWithChildren(ExecutableNormalizedField executableNormalizedField, + ImmutableList fieldAndAstParents, + int curLevel) { + checkMaxDepthExceeded(curLevel); - private void buildFieldWithChildren(ExecutableNormalizedField executableNormalizedField, - ImmutableList fieldAndAstParents, - FieldCollectorNormalizedQueryParams fieldCollectorNormalizedQueryParams, - ImmutableListMultimap.Builder fieldNormalizedField, - BiConsumer captureMergedField, - ImmutableListMultimap.Builder coordinatesToNormalizedFields, - int curLevel, - int maxLevel) { - if (curLevel > maxLevel) { - throw new AbortExecutionException("Maximum query depth exceeded " + curLevel + " > " + maxLevel); - } + CollectNFResult nextLevel = collectFromMergedField(executableNormalizedField, fieldAndAstParents, curLevel + 1); - CollectNFResult nextLevel = collectFromMergedField(fieldCollectorNormalizedQueryParams, executableNormalizedField, fieldAndAstParents, curLevel + 1); + int maxDepthSeen = curLevel; + for (ExecutableNormalizedField childENF : nextLevel.children) { + executableNormalizedField.addChild(childENF); + ImmutableList childFieldAndAstParents = nextLevel.normalizedFieldToAstFields.get(childENF); - for (ExecutableNormalizedField childENF : nextLevel.children) { - executableNormalizedField.addChild(childENF); - ImmutableList childFieldAndAstParents = nextLevel.normalizedFieldToAstFields.get(childENF); + MergedField mergedField = newMergedField(childFieldAndAstParents); + captureMergedField(childENF, mergedField); - MergedField mergedField = newMergedField(childFieldAndAstParents); - captureMergedField.accept(childENF, mergedField); + updateFieldToNFMap(childENF, childFieldAndAstParents); + updateCoordinatedToNFMap(childENF); - updateFieldToNFMap(childENF, childFieldAndAstParents, fieldNormalizedField); - updateCoordinatedToNFMap(coordinatesToNormalizedFields, childENF); + int depthSeen = buildFieldWithChildren(childENF, + childFieldAndAstParents, + curLevel + 1); + 
maxDepthSeen = Math.max(maxDepthSeen, depthSeen); - buildFieldWithChildren(childENF, - childFieldAndAstParents, - fieldCollectorNormalizedQueryParams, - fieldNormalizedField, - captureMergedField, - coordinatesToNormalizedFields, - curLevel + 1, - maxLevel); + checkMaxDepthExceeded(maxDepthSeen); + } + return maxDepthSeen; } - } - private static MergedField newMergedField(ImmutableList fieldAndAstParents) { - return MergedField.newMergedField(map(fieldAndAstParents, fieldAndAstParent -> fieldAndAstParent.field)).build(); - } - - private void updateFieldToNFMap(ExecutableNormalizedField executableNormalizedField, - ImmutableList mergedField, - ImmutableListMultimap.Builder fieldToNormalizedField) { - for (FieldAndAstParent astField : mergedField) { - fieldToNormalizedField.put(astField.field, executableNormalizedField); + private void checkMaxDepthExceeded(int depthSeen) { + if (depthSeen > this.options.getMaxChildrenDepth()) { + throw new AbortExecutionException("Maximum query depth exceeded. 
" + depthSeen + " > " + this.options.getMaxChildrenDepth()); + } } - } - private void updateCoordinatedToNFMap(ImmutableListMultimap.Builder coordinatesToNormalizedFields, ExecutableNormalizedField topLevel) { - for (String objectType : topLevel.getObjectTypeNames()) { - FieldCoordinates coordinates = FieldCoordinates.coordinates(objectType, topLevel.getFieldName()); - coordinatesToNormalizedFields.put(coordinates, topLevel); + private static MergedField newMergedField(ImmutableList fieldAndAstParents) { + return MergedField.newMergedField(map(fieldAndAstParents, fieldAndAstParent -> fieldAndAstParent.field)).build(); } - } - private static class FieldAndAstParent { - final Field field; - final GraphQLCompositeType astParentType; + private void updateFieldToNFMap(ExecutableNormalizedField executableNormalizedField, + ImmutableList mergedField) { + for (FieldAndAstParent astField : mergedField) { + fieldToNormalizedField.put(astField.field, executableNormalizedField); + } + } - private FieldAndAstParent(Field field, GraphQLCompositeType astParentType) { - this.field = field; - this.astParentType = astParentType; + private void updateCoordinatedToNFMap(ExecutableNormalizedField topLevel) { + for (String objectType : topLevel.getObjectTypeNames()) { + FieldCoordinates coordinates = FieldCoordinates.coordinates(objectType, topLevel.getFieldName()); + coordinatesToNormalizedFields.put(coordinates, topLevel); + } } - } + public CollectNFResult collectFromMergedField(ExecutableNormalizedField executableNormalizedField, + ImmutableList mergedField, + int level) { + List fieldDefs = executableNormalizedField.getFieldDefinitions(graphQLSchema); + Set possibleObjects = resolvePossibleObjects(fieldDefs); + if (possibleObjects.isEmpty()) { + return new CollectNFResult(ImmutableKit.emptyList(), ImmutableListMultimap.of()); + } + + List collectedFields = new ArrayList<>(); + for (FieldAndAstParent fieldAndAstParent : mergedField) { + if (fieldAndAstParent.field.getSelectionSet() 
== null) { + continue; + } + GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(graphQLSchema, fieldAndAstParent.astParentType, fieldAndAstParent.field.getName()); + GraphQLUnmodifiedType astParentType = unwrapAll(fieldDefinition.getType()); + this.collectFromSelectionSet(fieldAndAstParent.field.getSelectionSet(), + collectedFields, + (GraphQLCompositeType) astParentType, + possibleObjects, + null + ); + } + Map> fieldsByName = fieldsByResultKey(collectedFields); + ImmutableList.Builder resultNFs = ImmutableList.builder(); + ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); - public static class CollectNFResult { - private final Collection children; - private final ImmutableListMultimap normalizedFieldToAstFields; + createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, level, executableNormalizedField); - public CollectNFResult(Collection children, ImmutableListMultimap normalizedFieldToAstFields) { - this.children = children; - this.normalizedFieldToAstFields = normalizedFieldToAstFields; + return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); } - } - - public CollectNFResult collectFromMergedField(FieldCollectorNormalizedQueryParams parameters, - ExecutableNormalizedField executableNormalizedField, - ImmutableList mergedField, - int level) { - List fieldDefs = executableNormalizedField.getFieldDefinitions(parameters.getGraphQLSchema()); - Set possibleObjects = resolvePossibleObjects(fieldDefs, parameters.getGraphQLSchema()); - if (possibleObjects.isEmpty()) { - return new CollectNFResult(ImmutableKit.emptyList(), ImmutableListMultimap.of()); - } - - List collectedFields = new ArrayList<>(); - for (FieldAndAstParent fieldAndAstParent : mergedField) { - if (fieldAndAstParent.field.getSelectionSet() == null) { - continue; - } - GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(parameters.getGraphQLSchema(), fieldAndAstParent.astParentType, 
fieldAndAstParent.field.getName()); - GraphQLUnmodifiedType astParentType = unwrapAll(fieldDefinition.getType()); - this.collectFromSelectionSet(parameters, - fieldAndAstParent.field.getSelectionSet(), - collectedFields, - (GraphQLCompositeType) astParentType, - possibleObjects - ); + private Map> fieldsByResultKey(List collectedFields) { + Map> fieldsByName = new LinkedHashMap<>(); + for (CollectedField collectedField : collectedFields) { + fieldsByName.computeIfAbsent(collectedField.field.getResultKey(), ignored -> new ArrayList<>()).add(collectedField); + } + return fieldsByName; + } + + public CollectNFResult collectFromOperation(GraphQLObjectType rootType) { + + + Set possibleObjects = ImmutableSet.of(rootType); + List collectedFields = new ArrayList<>(); + collectFromSelectionSet(operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects, null); + // group by result key + Map> fieldsByName = fieldsByResultKey(collectedFields); + ImmutableList.Builder resultNFs = ImmutableList.builder(); + ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); + + createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, 1, null); + + return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); + } + + private void createNFs(ImmutableList.Builder nfListBuilder, + Map> fieldsByName, + ImmutableListMultimap.Builder normalizedFieldToAstFields, + int level, + ExecutableNormalizedField parent) { + for (String resultKey : fieldsByName.keySet()) { + List fieldsWithSameResultKey = fieldsByName.get(resultKey); + List commonParentsGroups = groupByCommonParents(fieldsWithSameResultKey); + for (CollectedFieldGroup fieldGroup : commonParentsGroups) { + ExecutableNormalizedField nf = createNF(fieldGroup, level, parent); + if (nf == null) { + continue; + } + for (CollectedField collectedField : fieldGroup.fields) { + normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, 
collectedField.astTypeCondition)); + } + nfListBuilder.add(nf); + + if (this.options.deferSupport) { + nf.addDeferredExecutions(fieldGroup.deferredExecutions); + } + } + if (commonParentsGroups.size() > 1) { + possibleMergerList.add(new PossibleMerger(parent, resultKey)); + } + } } - Map> fieldsByName = fieldsByResultKey(collectedFields); - ImmutableList.Builder resultNFs = ImmutableList.builder(); - ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); - createNFs(resultNFs, parameters, fieldsByName, normalizedFieldToAstFields, level, executableNormalizedField); + private ExecutableNormalizedField createNF(CollectedFieldGroup collectedFieldGroup, + int level, + ExecutableNormalizedField parent) { - return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); - } + this.fieldCount++; + if (this.fieldCount > this.options.getMaxFieldsCount()) { + throw new AbortExecutionException("Maximum field count exceeded. " + this.fieldCount + " > " + this.options.getMaxFieldsCount()); + } + Field field; + Set objectTypes = collectedFieldGroup.objectTypes; + field = collectedFieldGroup.fields.iterator().next().field; + String fieldName = field.getName(); + GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDefinition(graphQLSchema, objectTypes.iterator().next(), fieldName); + + Map argumentValues = ValuesResolver.getArgumentValues(fieldDefinition.getArguments(), field.getArguments(), CoercedVariables.of(this.coercedVariableValues.toMap()), this.options.graphQLContext, this.options.locale); + Map normalizedArgumentValues = null; + if (this.normalizedVariableValues != null) { + normalizedArgumentValues = ValuesResolver.getNormalizedArgumentValues(fieldDefinition.getArguments(), field.getArguments(), this.normalizedVariableValues); + } + ImmutableList objectTypeNames = map(objectTypes, GraphQLObjectType::getName); + return ExecutableNormalizedField.newNormalizedField() + .alias(field.getAlias()) + 
.resolvedArguments(argumentValues) + .normalizedArguments(normalizedArgumentValues) + .astArguments(field.getArguments()) + .objectTypeNames(objectTypeNames) + .fieldName(fieldName) + .level(level) + .parent(parent) + .build(); + } + + private List groupByCommonParents(Collection fields) { + if (this.options.deferSupport) { + return groupByCommonParentsWithDeferSupport(fields); + } else { + return groupByCommonParentsNoDeferSupport(fields); + } + } - private Map> fieldsByResultKey(List collectedFields) { - Map> fieldsByName = new LinkedHashMap<>(); - for (CollectedField collectedField : collectedFields) { - fieldsByName.computeIfAbsent(collectedField.field.getResultKey(), ignored -> new ArrayList<>()).add(collectedField); + private List groupByCommonParentsNoDeferSupport(Collection fields) { + ImmutableSet.Builder objectTypes = ImmutableSet.builder(); + for (CollectedField collectedField : fields) { + objectTypes.addAll(collectedField.objectTypes); + } + Set allRelevantObjects = objectTypes.build(); + Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); + if (groupByAstParent.size() == 1) { + return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects, null)); + } + ImmutableList.Builder result = ImmutableList.builder(); + for (GraphQLObjectType objectType : allRelevantObjects) { + Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); + result.add(new CollectedFieldGroup(relevantFields, singleton(objectType), null)); + } + return result.build(); } - return fieldsByName; - } - public CollectNFResult collectFromOperation(FieldCollectorNormalizedQueryParams parameters, - OperationDefinition operationDefinition, - GraphQLObjectType rootType) { + private List groupByCommonParentsWithDeferSupport(Collection fields) { + ImmutableSet.Builder objectTypes = ImmutableSet.builder(); + ImmutableSet.Builder deferredExecutionsBuilder = ImmutableSet.builder(); + for 
(CollectedField collectedField : fields) { + objectTypes.addAll(collectedField.objectTypes); - Set possibleObjects = ImmutableSet.of(rootType); - List collectedFields = new ArrayList<>(); - collectFromSelectionSet(parameters, operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects); - // group by result key - Map> fieldsByName = fieldsByResultKey(collectedFields); - ImmutableList.Builder resultNFs = ImmutableList.builder(); - ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); + NormalizedDeferredExecution collectedDeferredExecution = collectedField.deferredExecution; - createNFs(resultNFs, parameters, fieldsByName, normalizedFieldToAstFields, 1, null); + if (collectedDeferredExecution != null) { + deferredExecutionsBuilder.add(collectedDeferredExecution); + } + } - return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); - } + Set allRelevantObjects = objectTypes.build(); + Set deferredExecutions = deferredExecutionsBuilder.build(); - private void createNFs(ImmutableList.Builder nfListBuilder, - FieldCollectorNormalizedQueryParams parameters, - Map> fieldsByName, - ImmutableListMultimap.Builder normalizedFieldToAstFields, - int level, - ExecutableNormalizedField parent) { - for (String resultKey : fieldsByName.keySet()) { - List fieldsWithSameResultKey = fieldsByName.get(resultKey); - List commonParentsGroups = groupByCommonParents(fieldsWithSameResultKey); - for (CollectedFieldGroup fieldGroup : commonParentsGroups) { - ExecutableNormalizedField nf = createNF(parameters, fieldGroup, level, parent); - if (nf == null) { - continue; - } - for (CollectedField collectedField : fieldGroup.fields) { - normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, collectedField.astTypeCondition)); - } - nfListBuilder.add(nf); + Set duplicatedLabels = listDuplicatedLabels(deferredExecutions); + + if (!duplicatedLabels.isEmpty()) { + // Query validation should pick 
this up + Assert.assertShouldNeverHappen("Duplicated @defer labels are not allowed: [%s]", String.join(",", duplicatedLabels)); } - if (commonParentsGroups.size() > 1) { - parameters.addPossibleMergers(parent, resultKey); + + Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); + if (groupByAstParent.size() == 1) { + return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects, deferredExecutions)); } - } - } - private ExecutableNormalizedField createNF(FieldCollectorNormalizedQueryParams parameters, - CollectedFieldGroup collectedFieldGroup, - int level, - ExecutableNormalizedField parent) { - Field field; - Set objectTypes = collectedFieldGroup.objectTypes; - field = collectedFieldGroup.fields.iterator().next().field; - String fieldName = field.getName(); - GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(parameters.getGraphQLSchema(), objectTypes.iterator().next(), fieldName); - - Map argumentValues = ValuesResolver.getArgumentValues(fieldDefinition.getArguments(), field.getArguments(), CoercedVariables.of(parameters.getCoercedVariableValues()), parameters.getGraphQLContext(), parameters.getLocale()); - Map normalizedArgumentValues = null; - if (parameters.getNormalizedVariableValues() != null) { - normalizedArgumentValues = ValuesResolver.getNormalizedArgumentValues(fieldDefinition.getArguments(), field.getArguments(), parameters.getNormalizedVariableValues()); - } - ImmutableList objectTypeNames = map(objectTypes, GraphQLObjectType::getName); - - return ExecutableNormalizedField.newNormalizedField() - .alias(field.getAlias()) - .resolvedArguments(argumentValues) - .normalizedArguments(normalizedArgumentValues) - .astArguments(field.getArguments()) - .objectTypeNames(objectTypeNames) - .fieldName(fieldName) - .level(level) - .parent(parent) - .build(); - } + ImmutableList.Builder result = ImmutableList.builder(); + for (GraphQLObjectType objectType : allRelevantObjects) { + 
Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); - private static class CollectedFieldGroup { - Set objectTypes; - Set fields; + Set filteredDeferredExecutions = deferredExecutions.stream() + .filter(filterExecutionsFromType(objectType)) + .collect(toCollection(LinkedHashSet::new)); - public CollectedFieldGroup(Set fields, Set objectTypes) { - this.fields = fields; - this.objectTypes = objectTypes; + result.add(new CollectedFieldGroup(relevantFields, singleton(objectType), filteredDeferredExecutions)); + } + return result.build(); + } + + private static Predicate filterExecutionsFromType(GraphQLObjectType objectType) { + String objectTypeName = objectType.getName(); + return deferredExecution -> deferredExecution.getPossibleTypes() + .stream() + .map(GraphQLObjectType::getName) + .anyMatch(objectTypeName::equals); + } + + private Set listDuplicatedLabels(Collection deferredExecutions) { + return deferredExecutions.stream() + .map(NormalizedDeferredExecution::getLabel) + .filter(Objects::nonNull) + .collect(Collectors.groupingBy(Function.identity(), Collectors.counting())) + .entrySet() + .stream() + .filter(entry -> entry.getValue() > 1) + .map(Map.Entry::getKey) + .collect(toSet()); + } + + private void collectFromSelectionSet(SelectionSet selectionSet, + List result, + GraphQLCompositeType astTypeCondition, + Set possibleObjects, + NormalizedDeferredExecution deferredExecution + ) { + for (Selection selection : selectionSet.getSelections()) { + if (selection instanceof Field) { + collectField(result, (Field) selection, possibleObjects, astTypeCondition, deferredExecution); + } else if (selection instanceof InlineFragment) { + collectInlineFragment(result, (InlineFragment) selection, possibleObjects, astTypeCondition); + } else if (selection instanceof FragmentSpread) { + collectFragmentSpread(result, (FragmentSpread) selection, possibleObjects); + } + } } - } - private List groupByCommonParents(Collection fields) { - 
ImmutableSet.Builder objectTypes = ImmutableSet.builder(); - for (CollectedField collectedField : fields) { - objectTypes.addAll(collectedField.objectTypes); - } - Set allRelevantObjects = objectTypes.build(); - Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); - if (groupByAstParent.size() == 1) { - return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects)); - } - ImmutableList.Builder result = ImmutableList.builder(); - for (GraphQLObjectType objectType : allRelevantObjects) { - Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); - result.add(new CollectedFieldGroup(relevantFields, singleton(objectType))); + private void collectFragmentSpread(List result, + FragmentSpread fragmentSpread, + Set possibleObjects + ) { + if (!conditionalNodes.shouldInclude(fragmentSpread, + this.coercedVariableValues.toMap(), + this.graphQLSchema, + this.options.graphQLContext)) { + return; + } + FragmentDefinition fragmentDefinition = assertNotNull(this.fragments.get(fragmentSpread.getName())); + + if (!conditionalNodes.shouldInclude(fragmentDefinition, + this.coercedVariableValues.toMap(), + this.graphQLSchema, + this.options.graphQLContext)) { + return; + } + GraphQLCompositeType newAstTypeCondition = (GraphQLCompositeType) assertNotNull(this.graphQLSchema.getType(fragmentDefinition.getTypeCondition().getName())); + Set newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition); + + NormalizedDeferredExecution newDeferredExecution = buildDeferredExecution( + fragmentSpread.getDirectives(), + newPossibleObjects); + + collectFromSelectionSet(fragmentDefinition.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects, newDeferredExecution); } - return result.build(); - } + private void collectInlineFragment(List result, + InlineFragment inlineFragment, + Set possibleObjects, + GraphQLCompositeType astTypeCondition + ) { + if 
(!conditionalNodes.shouldInclude(inlineFragment, this.coercedVariableValues.toMap(), this.graphQLSchema, this.options.graphQLContext)) { + return; + } + Set newPossibleObjects = possibleObjects; + GraphQLCompositeType newAstTypeCondition = astTypeCondition; + + if (inlineFragment.getTypeCondition() != null) { + newAstTypeCondition = (GraphQLCompositeType) this.graphQLSchema.getType(inlineFragment.getTypeCondition().getName()); + newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition); - private void collectFromSelectionSet(FieldCollectorNormalizedQueryParams parameters, - SelectionSet selectionSet, - List result, - GraphQLCompositeType astTypeCondition, - Set possibleObjects - ) { - for (Selection selection : selectionSet.getSelections()) { - if (selection instanceof Field) { - collectField(parameters, result, (Field) selection, possibleObjects, astTypeCondition); - } else if (selection instanceof InlineFragment) { - collectInlineFragment(parameters, result, (InlineFragment) selection, possibleObjects, astTypeCondition); - } else if (selection instanceof FragmentSpread) { - collectFragmentSpread(parameters, result, (FragmentSpread) selection, possibleObjects); } - } - } - private static class CollectedField { - Field field; - Set objectTypes; - GraphQLCompositeType astTypeCondition; + NormalizedDeferredExecution newDeferredExecution = buildDeferredExecution( + inlineFragment.getDirectives(), + newPossibleObjects + ); - public CollectedField(Field field, Set objectTypes, GraphQLCompositeType astTypeCondition) { - this.field = field; - this.objectTypes = objectTypes; - this.astTypeCondition = astTypeCondition; + collectFromSelectionSet(inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects, newDeferredExecution); } - public boolean isAbstract() { - return GraphQLTypeUtil.isInterfaceOrUnion(astTypeCondition); + private NormalizedDeferredExecution buildDeferredExecution( + List directives, + Set 
newPossibleObjects) { + if (!options.deferSupport) { + return null; + } + + return IncrementalUtils.createDeferredExecution( + this.coercedVariableValues.toMap(), + directives, + (label) -> new NormalizedDeferredExecution(label, newPossibleObjects) + ); } - public boolean isConcrete() { - return GraphQLTypeUtil.isObjectType(astTypeCondition); + private void collectField(List result, + Field field, + Set possibleObjectTypes, + GraphQLCompositeType astTypeCondition, + NormalizedDeferredExecution deferredExecution + ) { + if (!conditionalNodes.shouldInclude(field, + this.coercedVariableValues.toMap(), + this.graphQLSchema, + this.options.graphQLContext)) { + return; + } + // this means there is actually no possible type for this field, and we are done + if (possibleObjectTypes.isEmpty()) { + return; + } + result.add(new CollectedField(field, possibleObjectTypes, astTypeCondition, deferredExecution)); } - } - private void collectFragmentSpread(FieldCollectorNormalizedQueryParams parameters, - List result, - FragmentSpread fragmentSpread, - Set possibleObjects - ) { - if (!conditionalNodes.shouldInclude(fragmentSpread, - parameters.getCoercedVariableValues(), - parameters.getGraphQLSchema(), - parameters.getGraphQLContext())) { - return; - } - FragmentDefinition fragmentDefinition = assertNotNull(parameters.getFragmentsByName().get(fragmentSpread.getName())); - - if (!conditionalNodes.shouldInclude(fragmentDefinition, - parameters.getCoercedVariableValues(), - parameters.getGraphQLSchema(), - parameters.getGraphQLContext())) { - return; - } - GraphQLCompositeType newAstTypeCondition = (GraphQLCompositeType) assertNotNull(parameters.getGraphQLSchema().getType(fragmentDefinition.getTypeCondition().getName())); - Set newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition, parameters.getGraphQLSchema()); - collectFromSelectionSet(parameters, fragmentDefinition.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects); - } + private Set 
narrowDownPossibleObjects(Set currentOnes, + GraphQLCompositeType typeCondition) { + ImmutableSet resolvedTypeCondition = resolvePossibleObjects(typeCondition); + if (currentOnes.isEmpty()) { + return resolvedTypeCondition; + } - private void collectInlineFragment(FieldCollectorNormalizedQueryParams parameters, - List result, - InlineFragment inlineFragment, - Set possibleObjects, - GraphQLCompositeType astTypeCondition - ) { - if (!conditionalNodes.shouldInclude(inlineFragment, parameters.getCoercedVariableValues(), parameters.getGraphQLSchema(), parameters.getGraphQLContext())) { - return; + // Faster intersection, as either set often has a size of 1. + return intersection(currentOnes, resolvedTypeCondition); } - Set newPossibleObjects = possibleObjects; - GraphQLCompositeType newAstTypeCondition = astTypeCondition; - if (inlineFragment.getTypeCondition() != null) { - newAstTypeCondition = (GraphQLCompositeType) parameters.getGraphQLSchema().getType(inlineFragment.getTypeCondition().getName()); - newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition, parameters.getGraphQLSchema()); + private ImmutableSet resolvePossibleObjects(List defs) { + ImmutableSet.Builder builder = ImmutableSet.builder(); + for (GraphQLFieldDefinition def : defs) { + GraphQLUnmodifiedType outputType = unwrapAll(def.getType()); + if (outputType instanceof GraphQLCompositeType) { + builder.addAll(resolvePossibleObjects((GraphQLCompositeType) outputType)); + } + } + + return builder.build(); } - collectFromSelectionSet(parameters, inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects); - } - private void collectField(FieldCollectorNormalizedQueryParams parameters, - List result, - Field field, - Set possibleObjectTypes, - GraphQLCompositeType astTypeCondition - ) { - if (!conditionalNodes.shouldInclude(field, - parameters.getCoercedVariableValues(), - parameters.getGraphQLSchema(), - parameters.getGraphQLContext())) { - return; + 
private ImmutableSet resolvePossibleObjects(GraphQLCompositeType type) { + if (type instanceof GraphQLObjectType) { + return ImmutableSet.of((GraphQLObjectType) type); + } else if (type instanceof GraphQLInterfaceType) { + return ImmutableSet.copyOf(graphQLSchema.getImplementations((GraphQLInterfaceType) type)); + } else if (type instanceof GraphQLUnionType) { + List unionTypes = ((GraphQLUnionType) type).getTypes(); + return ImmutableSet.copyOf(ImmutableKit.map(unionTypes, GraphQLObjectType.class::cast)); + } else { + return assertShouldNeverHappen(); + } } - // this means there is actually no possible type for this field, and we are done - if (possibleObjectTypes.isEmpty()) { - return; + + private static class PossibleMerger { + ExecutableNormalizedField parent; + String resultKey; + + public PossibleMerger(ExecutableNormalizedField parent, String resultKey) { + this.parent = parent; + this.resultKey = resultKey; + } } - result.add(new CollectedField(field, possibleObjectTypes, astTypeCondition)); - } - private Set narrowDownPossibleObjects(Set currentOnes, - GraphQLCompositeType typeCondition, - GraphQLSchema graphQLSchema) { + private static class CollectedField { + Field field; + Set objectTypes; + GraphQLCompositeType astTypeCondition; + NormalizedDeferredExecution deferredExecution; - ImmutableSet resolvedTypeCondition = resolvePossibleObjects(typeCondition, graphQLSchema); - if (currentOnes.isEmpty()) { - return resolvedTypeCondition; + public CollectedField(Field field, Set objectTypes, GraphQLCompositeType astTypeCondition, NormalizedDeferredExecution deferredExecution) { + this.field = field; + this.objectTypes = objectTypes; + this.astTypeCondition = astTypeCondition; + this.deferredExecution = deferredExecution; + } } - // Faster intersection, as either set often has a size of 1. 
- return intersection(currentOnes, resolvedTypeCondition); - } + public static class CollectNFResult { + private final Collection children; + private final ImmutableListMultimap normalizedFieldToAstFields; - private ImmutableSet resolvePossibleObjects(List defs, GraphQLSchema graphQLSchema) { - ImmutableSet.Builder builder = ImmutableSet.builder(); + public CollectNFResult(Collection children, ImmutableListMultimap normalizedFieldToAstFields) { + this.children = children; + this.normalizedFieldToAstFields = normalizedFieldToAstFields; + } + } + + private static class FieldAndAstParent { + final Field field; + final GraphQLCompositeType astParentType; - for (GraphQLFieldDefinition def : defs) { - GraphQLUnmodifiedType outputType = unwrapAll(def.getType()); - if (outputType instanceof GraphQLCompositeType) { - builder.addAll(resolvePossibleObjects((GraphQLCompositeType) outputType, graphQLSchema)); + private FieldAndAstParent(Field field, GraphQLCompositeType astParentType) { + this.field = field; + this.astParentType = astParentType; } } - return builder.build(); - } + private static class CollectedFieldGroup { + Set objectTypes; + Set fields; + Set deferredExecutions; - private ImmutableSet resolvePossibleObjects(GraphQLCompositeType type, GraphQLSchema graphQLSchema) { - if (type instanceof GraphQLObjectType) { - return ImmutableSet.of((GraphQLObjectType) type); - } else if (type instanceof GraphQLInterfaceType) { - return ImmutableSet.copyOf(graphQLSchema.getImplementations((GraphQLInterfaceType) type)); - } else if (type instanceof GraphQLUnionType) { - List unionTypes = ((GraphQLUnionType) type).getTypes(); - return ImmutableSet.copyOf(ImmutableKit.map(unionTypes, GraphQLObjectType.class::cast)); - } else { - return assertShouldNeverHappen(); + public CollectedFieldGroup(Set fields, Set objectTypes, Set deferredExecutions) { + this.fields = fields; + this.objectTypes = objectTypes; + this.deferredExecutions = deferredExecutions; + } } } + } diff --git 
a/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java index 7dc3d11f32..877decb767 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java @@ -3,6 +3,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import graphql.Assert; +import graphql.Directives; +import graphql.ExperimentalApi; import graphql.PublicApi; import graphql.execution.directives.QueryDirectives; import graphql.introspection.Introspection; @@ -18,8 +20,10 @@ import graphql.language.OperationDefinition; import graphql.language.Selection; import graphql.language.SelectionSet; +import graphql.language.StringValue; import graphql.language.TypeName; import graphql.language.Value; +import graphql.normalized.incremental.NormalizedDeferredExecution; import graphql.schema.GraphQLCompositeType; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLObjectType; @@ -30,8 +34,10 @@ import java.util.ArrayList; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.stream.Collectors; import static graphql.collect.ImmutableKit.emptyList; @@ -46,7 +52,7 @@ /** * This class can take a list of {@link ExecutableNormalizedField}s and compiling out a * normalised operation {@link Document} that would represent how those fields - * maybe executed. + * may be executed. *

* This is essentially the reverse of {@link ExecutableNormalizedOperationFactory} which takes * operation text and makes {@link ExecutableNormalizedField}s from it, this takes {@link ExecutableNormalizedField}s @@ -82,7 +88,7 @@ public Map getVariables() { /** * This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s - * + *

* The {@link VariablePredicate} is used called to decide if the given argument values should be made into a variable * OR inlined into the operation text as a graphql literal. * @@ -99,21 +105,21 @@ public static CompilerResult compileToDocument(@NotNull GraphQLSchema schema, @Nullable String operationName, @NotNull List topLevelFields, @Nullable VariablePredicate variablePredicate) { - return compileToDocument(schema,operationKind,operationName,topLevelFields,Map.of(),variablePredicate); + return compileToDocument(schema, operationKind, operationName, topLevelFields, Map.of(), variablePredicate); } /** * This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s - * + *

* The {@link VariablePredicate} is used called to decide if the given argument values should be made into a variable * OR inlined into the operation text as a graphql literal. * - * @param schema the graphql schema to use - * @param operationKind the kind of operation - * @param operationName the name of the operation to use - * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from - * @param normalizedFieldToQueryDirectives the map of normalized field to query directives - * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation + * @param schema the graphql schema to use + * @param operationKind the kind of operation + * @param operationName the name of the operation to use + * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from + * @param normalizedFieldToQueryDirectives the map of normalized field to query directives + * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation * * @return a {@link CompilerResult} object */ @@ -123,10 +129,75 @@ public static CompilerResult compileToDocument(@NotNull GraphQLSchema schema, @NotNull List topLevelFields, @NotNull Map normalizedFieldToQueryDirectives, @Nullable VariablePredicate variablePredicate) { + return compileToDocument(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate, false); + } + + + /** + * This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s, with support for the experimental @defer directive. + *

+ * The {@link VariablePredicate} is used called to decide if the given argument values should be made into a variable + * OR inlined into the operation text as a graphql literal. + * + * @param schema the graphql schema to use + * @param operationKind the kind of operation + * @param operationName the name of the operation to use + * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from + * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation + * + * @return a {@link CompilerResult} object + * + * @see ExecutableNormalizedOperationToAstCompiler#compileToDocument(GraphQLSchema, OperationDefinition.Operation, String, List, VariablePredicate) + */ + @ExperimentalApi + public static CompilerResult compileToDocumentWithDeferSupport(@NotNull GraphQLSchema schema, + @NotNull OperationDefinition.Operation operationKind, + @Nullable String operationName, + @NotNull List topLevelFields, + @Nullable VariablePredicate variablePredicate + ) { + return compileToDocumentWithDeferSupport(schema, operationKind, operationName, topLevelFields, Map.of(), variablePredicate); + } + + /** + * This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s, with support for the experimental @defer directive. + *

+ * The {@link VariablePredicate} is used called to decide if the given argument values should be made into a variable + * OR inlined into the operation text as a graphql literal. + * + * @param schema the graphql schema to use + * @param operationKind the kind of operation + * @param operationName the name of the operation to use + * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from + * @param normalizedFieldToQueryDirectives the map of normalized field to query directives + * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation + * + * @return a {@link CompilerResult} object + * + * @see ExecutableNormalizedOperationToAstCompiler#compileToDocument(GraphQLSchema, OperationDefinition.Operation, String, List, Map, VariablePredicate) + */ + @ExperimentalApi + public static CompilerResult compileToDocumentWithDeferSupport(@NotNull GraphQLSchema schema, + @NotNull OperationDefinition.Operation operationKind, + @Nullable String operationName, + @NotNull List topLevelFields, + @NotNull Map normalizedFieldToQueryDirectives, + @Nullable VariablePredicate variablePredicate + ) { + return compileToDocument(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate, true); + } + + private static CompilerResult compileToDocument(@NotNull GraphQLSchema schema, + @NotNull OperationDefinition.Operation operationKind, + @Nullable String operationName, + @NotNull List topLevelFields, + @NotNull Map normalizedFieldToQueryDirectives, + @Nullable VariablePredicate variablePredicate, + boolean deferSupport) { GraphQLObjectType operationType = getOperationType(schema, operationKind); VariableAccumulator variableAccumulator = new VariableAccumulator(variablePredicate); - List> selections = subselectionsForNormalizedField(schema, operationType.getName(), topLevelFields, normalizedFieldToQueryDirectives, variableAccumulator); + List> 
selections = subselectionsForNormalizedField(schema, operationType.getName(), topLevelFields, normalizedFieldToQueryDirectives, variableAccumulator, deferSupport); SelectionSet selectionSet = new SelectionSet(selections); OperationDefinition.Builder definitionBuilder = OperationDefinition.newOperationDefinition() @@ -148,7 +219,20 @@ private static List> subselectionsForNormalizedField(GraphQLSchema @NotNull String parentOutputType, List executableNormalizedFields, @NotNull Map normalizedFieldToQueryDirectives, - VariableAccumulator variableAccumulator) { + VariableAccumulator variableAccumulator, + boolean deferSupport) { + if (deferSupport) { + return subselectionsForNormalizedFieldWithDeferSupport(schema, parentOutputType, executableNormalizedFields, normalizedFieldToQueryDirectives, variableAccumulator); + } else { + return subselectionsForNormalizedFieldNoDeferSupport(schema, parentOutputType, executableNormalizedFields, normalizedFieldToQueryDirectives, variableAccumulator); + } + } + + private static List> subselectionsForNormalizedFieldNoDeferSupport(GraphQLSchema schema, + @NotNull String parentOutputType, + List executableNormalizedFields, + @NotNull Map normalizedFieldToQueryDirectives, + VariableAccumulator variableAccumulator) { ImmutableList.Builder> selections = ImmutableList.builder(); // All conditional fields go here instead of directly to selections, so they can be grouped together @@ -157,13 +241,13 @@ private static List> subselectionsForNormalizedField(GraphQLSchema for (ExecutableNormalizedField nf : executableNormalizedFields) { if (nf.isConditional(schema)) { - selectionForNormalizedField(schema, nf, normalizedFieldToQueryDirectives, variableAccumulator) + selectionForNormalizedField(schema, nf, normalizedFieldToQueryDirectives, variableAccumulator, false) .forEach((objectTypeName, field) -> fieldsByTypeCondition .computeIfAbsent(objectTypeName, ignored -> new ArrayList<>()) .add(field)); } else { - 
selections.add(selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives,variableAccumulator)); + selections.add(selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives, variableAccumulator, false)); } } @@ -179,17 +263,89 @@ private static List> subselectionsForNormalizedField(GraphQLSchema return selections.build(); } + + private static List> subselectionsForNormalizedFieldWithDeferSupport(GraphQLSchema schema, + @NotNull String parentOutputType, + List executableNormalizedFields, + @NotNull Map normalizedFieldToQueryDirectives, + VariableAccumulator variableAccumulator) { + ImmutableList.Builder> selections = ImmutableList.builder(); + + // All conditional and deferred fields go here instead of directly to selections, so they can be grouped together + // in the same inline fragment in the output + // + Map> fieldsByFragmentDetails = new LinkedHashMap<>(); + + for (ExecutableNormalizedField nf : executableNormalizedFields) { + LinkedHashSet deferredExecutions = nf.getDeferredExecutions(); + + if (nf.isConditional(schema)) { + selectionForNormalizedField(schema, nf, normalizedFieldToQueryDirectives, variableAccumulator, true) + .forEach((objectTypeName, field) -> { + if (deferredExecutions == null || deferredExecutions.isEmpty()) { + fieldsByFragmentDetails + .computeIfAbsent(new ExecutionFragmentDetails(objectTypeName, null), ignored -> new ArrayList<>()) + .add(field); + } else { + deferredExecutions.forEach(deferredExecution -> { + fieldsByFragmentDetails + .computeIfAbsent(new ExecutionFragmentDetails(objectTypeName, deferredExecution), ignored -> new ArrayList<>()) + .add(field); + }); + } + }); + + } else if (deferredExecutions != null && !deferredExecutions.isEmpty()) { + Field field = selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives, variableAccumulator, true); + + deferredExecutions.forEach(deferredExecution -> { + fieldsByFragmentDetails + 
.computeIfAbsent(new ExecutionFragmentDetails(null, deferredExecution), ignored -> new ArrayList<>()) + .add(field); + }); + } else { + selections.add(selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives, variableAccumulator, true)); + } + } + + fieldsByFragmentDetails.forEach((typeAndDeferPair, fields) -> { + InlineFragment.Builder fragmentBuilder = newInlineFragment() + .selectionSet(selectionSet(fields)); + + if (typeAndDeferPair.typeName != null) { + TypeName typeName = newTypeName(typeAndDeferPair.typeName).build(); + fragmentBuilder.typeCondition(typeName); + } + + if (typeAndDeferPair.deferredExecution != null) { + Directive.Builder deferBuilder = Directive.newDirective().name(Directives.DeferDirective.getName()); + + if (typeAndDeferPair.deferredExecution.getLabel() != null) { + deferBuilder.argument(newArgument().name("label").value(StringValue.of(typeAndDeferPair.deferredExecution.getLabel())).build()); + } + + fragmentBuilder.directive(deferBuilder.build()); + } + + + selections.add(fragmentBuilder.build()); + }); + + return selections.build(); + } + /** * @return Map of object type names to list of fields */ private static Map selectionForNormalizedField(GraphQLSchema schema, ExecutableNormalizedField executableNormalizedField, @NotNull Map normalizedFieldToQueryDirectives, - VariableAccumulator variableAccumulator) { + VariableAccumulator variableAccumulator, + boolean deferSupport) { Map groupedFields = new LinkedHashMap<>(); for (String objectTypeName : executableNormalizedField.getObjectTypeNames()) { - groupedFields.put(objectTypeName, selectionForNormalizedField(schema, objectTypeName, executableNormalizedField,normalizedFieldToQueryDirectives, variableAccumulator)); + groupedFields.put(objectTypeName, selectionForNormalizedField(schema, objectTypeName, executableNormalizedField, normalizedFieldToQueryDirectives, variableAccumulator, deferSupport)); } return groupedFields; @@ -202,7 +358,8 @@ private static 
Field selectionForNormalizedField(GraphQLSchema schema, String objectTypeName, ExecutableNormalizedField executableNormalizedField, @NotNull Map normalizedFieldToQueryDirectives, - VariableAccumulator variableAccumulator) { + VariableAccumulator variableAccumulator, + boolean deferSupport) { final List> subSelections; if (executableNormalizedField.getChildren().isEmpty()) { subSelections = emptyList(); @@ -215,7 +372,8 @@ private static Field selectionForNormalizedField(GraphQLSchema schema, fieldOutputType.getName(), executableNormalizedField.getChildren(), normalizedFieldToQueryDirectives, - variableAccumulator + variableAccumulator, + deferSupport ); } @@ -230,9 +388,9 @@ private static Field selectionForNormalizedField(GraphQLSchema schema, .alias(executableNormalizedField.getAlias()) .selectionSet(selectionSet) .arguments(arguments); - if(queryDirectives == null || queryDirectives.getImmediateAppliedDirectivesByField().isEmpty() ){ + if (queryDirectives == null || queryDirectives.getImmediateAppliedDirectivesByField().isEmpty()) { return builder.build(); - }else { + } else { List directives = queryDirectives.getImmediateAppliedDirectivesByField().keySet().stream().flatMap(field -> field.getDirectives().stream()).collect(Collectors.toList()); return builder .directives(directives) @@ -326,4 +484,33 @@ private static GraphQLObjectType getOperationType(@NotNull GraphQLSchema schema, return Assert.assertShouldNeverHappen("Unknown operation kind " + operationKind); } + /** + * Represents important execution details that can be associated with a fragment. 
+ */ + private static class ExecutionFragmentDetails { + private final String typeName; + private final NormalizedDeferredExecution deferredExecution; + + public ExecutionFragmentDetails(String typeName, NormalizedDeferredExecution deferredExecution) { + this.typeName = typeName; + this.deferredExecution = deferredExecution; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ExecutionFragmentDetails that = (ExecutionFragmentDetails) o; + return Objects.equals(typeName, that.typeName) && Objects.equals(deferredExecution, that.deferredExecution); + } + + @Override + public int hashCode() { + return Objects.hash(typeName, deferredExecution); + } + } } diff --git a/src/main/java/graphql/normalized/FieldCollectorNormalizedQueryParams.java b/src/main/java/graphql/normalized/FieldCollectorNormalizedQueryParams.java deleted file mode 100644 index 8b4d03bf3e..0000000000 --- a/src/main/java/graphql/normalized/FieldCollectorNormalizedQueryParams.java +++ /dev/null @@ -1,136 +0,0 @@ -package graphql.normalized; - -import graphql.Assert; -import graphql.GraphQLContext; -import graphql.Internal; -import graphql.language.FragmentDefinition; -import graphql.schema.GraphQLSchema; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -@Internal -public class FieldCollectorNormalizedQueryParams { - private final GraphQLSchema graphQLSchema; - private final Map fragmentsByName; - private final Map coercedVariableValues; - private final Map normalizedVariableValues; - private final GraphQLContext graphQLContext; - private final Locale locale; - - private final List possibleMergerList = new ArrayList<>(); - - public static class PossibleMerger { - ExecutableNormalizedField parent; - String resultKey; - - 
public PossibleMerger(ExecutableNormalizedField parent, String resultKey) { - this.parent = parent; - this.resultKey = resultKey; - } - } - - public void addPossibleMergers(ExecutableNormalizedField parent, String resultKey) { - possibleMergerList.add(new PossibleMerger(parent, resultKey)); - } - - public List getPossibleMergerList() { - return possibleMergerList; - } - - public GraphQLSchema getGraphQLSchema() { - return graphQLSchema; - } - - public Map getFragmentsByName() { - return fragmentsByName; - } - - @NotNull - public Map getCoercedVariableValues() { - return coercedVariableValues; - } - - @Nullable - public Map getNormalizedVariableValues() { - return normalizedVariableValues; - } - - public GraphQLContext getGraphQLContext() { - return graphQLContext; - } - - public Locale getLocale() { - return locale; - } - - private FieldCollectorNormalizedQueryParams(Builder builder) { - this.fragmentsByName = builder.fragmentsByName; - this.graphQLSchema = builder.graphQLSchema; - this.coercedVariableValues = builder.coercedVariableValues; - this.normalizedVariableValues = builder.normalizedVariableValues; - this.graphQLContext = builder.graphQLContext; - this.locale = builder.locale; - } - - public static Builder newParameters() { - return new Builder(); - } - - public static class Builder { - private GraphQLSchema graphQLSchema; - private final Map fragmentsByName = new LinkedHashMap<>(); - private final Map coercedVariableValues = new LinkedHashMap<>(); - private Map normalizedVariableValues; - private GraphQLContext graphQLContext = GraphQLContext.getDefault(); - private Locale locale = Locale.getDefault(); - - /** - * @see FieldCollectorNormalizedQueryParams#newParameters() - */ - private Builder() { - - } - - public Builder schema(GraphQLSchema graphQLSchema) { - this.graphQLSchema = graphQLSchema; - return this; - } - - public Builder fragments(Map fragmentsByName) { - this.fragmentsByName.putAll(fragmentsByName); - return this; - } - - public Builder 
coercedVariables(Map coercedVariableValues) { - this.coercedVariableValues.putAll(coercedVariableValues); - return this; - } - - public Builder normalizedVariables(Map normalizedVariableValues) { - this.normalizedVariableValues = normalizedVariableValues; - return this; - } - - public Builder graphQLContext(GraphQLContext graphQLContext) { - this.graphQLContext = graphQLContext; - return this; - } - - public Builder locale(Locale locale) { - this.locale = locale; - return this; - } - - public FieldCollectorNormalizedQueryParams build() { - Assert.assertNotNull(graphQLSchema, () -> "You must provide a schema"); - return new FieldCollectorNormalizedQueryParams(this); - } - - } -} diff --git a/src/main/java/graphql/normalized/NormalizedInputValue.java b/src/main/java/graphql/normalized/NormalizedInputValue.java index 390bac032a..1593d07135 100644 --- a/src/main/java/graphql/normalized/NormalizedInputValue.java +++ b/src/main/java/graphql/normalized/NormalizedInputValue.java @@ -1,5 +1,6 @@ package graphql.normalized; +import graphql.Assert; import graphql.PublicApi; import graphql.language.Value; @@ -7,6 +8,7 @@ import static graphql.Assert.assertNotNull; import static graphql.Assert.assertTrue; +import static graphql.Assert.assertTrue; import static graphql.Assert.assertValidName; import static graphql.language.AstPrinter.printAst; @@ -99,14 +101,14 @@ private boolean isListOnly(String typeName) { private String unwrapOne(String typeName) { assertNotNull(typeName); - assertTrue(typeName.trim().length() > 0, () -> "We have an empty type name unwrapped"); + Assert.assertTrue(typeName.trim().length() > 0, () -> "We have an empty type name unwrapped"); if (typeName.endsWith("!")) { return typeName.substring(0, typeName.length() - 1); } if (isListOnly(typeName)) { // nominally this will never be true - but better to be safe than sorry - assertTrue(typeName.startsWith("["), () -> String.format("We have a unbalanced list type string '%s'", typeName)); - 
assertTrue(typeName.endsWith("]"), () -> String.format("We have a unbalanced list type string '%s'", typeName)); + assertTrue(typeName.startsWith("["), "We have a unbalanced list type string '%s'", typeName); + assertTrue(typeName.endsWith("]"), "We have a unbalanced list type string '%s'", typeName); return typeName.substring(1, typeName.length() - 1); } diff --git a/src/main/java/graphql/normalized/incremental/NormalizedDeferredExecution.java b/src/main/java/graphql/normalized/incremental/NormalizedDeferredExecution.java new file mode 100644 index 0000000000..a3f789e063 --- /dev/null +++ b/src/main/java/graphql/normalized/incremental/NormalizedDeferredExecution.java @@ -0,0 +1,129 @@ +package graphql.normalized.incremental; + +import graphql.ExperimentalApi; +import graphql.schema.GraphQLObjectType; + +import javax.annotation.Nullable; +import java.util.Set; + +/** + * Represents details about the defer execution that can be associated with a {@link graphql.normalized.ExecutableNormalizedField}. + * + * Taking this schema as an example: + *

+ *     type Query { animal: Animal }
+ *     interface Animal { name: String, age: Int }
+ *     type Cat implements Animal { name: String, age: Int }
+ *     type Dog implements Animal { name: String, age: Int }
+ * 
+ * + * An ENF can be associated with multiple `NormalizedDeferredExecution`s + * + * For example, this query: + *
+ *     query MyQuery {
+ *         animal {
+ *             ... @defer {
+ *                 name
+ *             }
+ *             ... @defer {
+ *                 name
+ *             }
+ *         }
+ *     }
+ * 
+ * + * Would result in one ENF (name) associated with 2 `NormalizedDeferredExecution` instances. This is relevant for the execution + * since the field would have to be included in 2 incremental payloads. (I know, there's some duplication here, but + * this is the current state of the spec. There are some discussions happening around de-duplicating data in scenarios + * like this, so this behaviour might change in the future). + * + * A `NormalizedDeferredExecution` may be associated with a list of possible types + * + * For example, this query: + *
+ *     query MyQuery {
+ *         animal {
+ *             ... @defer {
+ *                 name
+ *             }
+ *         }
+ *     }
+ * 
+ * results in a `NormalizedDeferredExecution` with no label and possible types [Dog, Cat] + * + * A `NormalizedDeferredExecution` may be associated with specific types + * For example, this query: + *
+ *     query MyQuery {
+ *         animal {
+ *             ... on Cat @defer {
+ *                 name
+ *             }
+ *             ... on Dog {
+ *                 name
+ *             }
+ *         }
+ *     }
+ * 
+ * results in a single ENF (name) associated with a `NormalizedDeferredExecution` with only "Cat" as a possible type. This means + * that, at execution time, `name` should be deferred only if the return object is a "Cat" (but not a if it is a "Dog"). + * + * ENFs associated with the same instance of `NormalizedDeferredExecution` will be resolved in the same incremental response payload + * For example, take these queries: + * + *
+ *     query Query1 {
+ *         animal {
+ *             ... @defer {
+ *                 name
+ *             }
+ *             ... @defer {
+ *                 age
+ *             }
+ *         }
+ *     }
+ *
+ *     query Query2 {
+ *         animal {
+ *             ... @defer {
+ *                 name
+ *                 age
+ *             }
+ *         }
+ *     }
+ * 
+ * + * In `Query1`, the ENFs name and age are associated with different instances of `NormalizedDeferredExecution`. This means that, + * during execution, `name` and `age` can be delivered at different times (if name is resolved faster, it will be + * delivered first, and vice-versa). + * In `Query2` the fields will share the same instance of `NormalizedDeferredExecution`. This ensures that, at execution time, the + * fields are guaranteed to be delivered together. In other words, execution should wait until the slowest field resolves + * and deliver both fields at the same time. + * + */ +@ExperimentalApi +public class NormalizedDeferredExecution { + private final String label; + private final Set possibleTypes; + + public NormalizedDeferredExecution(@Nullable String label, Set possibleTypes) { + this.label = label; + this.possibleTypes = possibleTypes; + } + + /** + * @return the label associated with this defer declaration + */ + @Nullable + public String getLabel() { + return label; + } + + /** + * @return the concrete object types that are associated with this defer execution + */ + public Set getPossibleTypes() { + return possibleTypes; + } +} diff --git a/src/main/java/graphql/parser/Parser.java b/src/main/java/graphql/parser/Parser.java index a2bf6b9058..15a0f5f641 100644 --- a/src/main/java/graphql/parser/Parser.java +++ b/src/main/java/graphql/parser/Parser.java @@ -1,7 +1,6 @@ package graphql.parser; import com.google.common.collect.ImmutableList; -import graphql.DeprecatedAt; import graphql.Internal; import graphql.PublicApi; import graphql.language.Document; @@ -137,84 +136,29 @@ public Document parseDocument(ParserEnvironment environment) throws InvalidSynta * @throws InvalidSyntaxException if the input is not valid graphql syntax */ public Document parseDocument(String input) throws InvalidSyntaxException { - return parseDocument(input, (ParserOptions) null); - } - - /** - * Parses reader input into a graphql AST {@link Document} - * - * @param 
reader the reader input to parse - * - * @return an AST {@link Document} - * - * @throws InvalidSyntaxException if the input is not valid graphql syntax - */ - public Document parseDocument(Reader reader) throws InvalidSyntaxException { - ParserEnvironment parserEnvironment = ParserEnvironment.newParserEnvironment() - .document(reader) - .build(); - return parseDocumentImpl(parserEnvironment); - } - - /** - * Parses a string input into a graphql AST {@link Document} - * - * @param input the input to parse - * @param sourceName - the name to attribute to the input text in {@link SourceLocation#getSourceName()} - * - * @return an AST {@link Document} - * - * @throws InvalidSyntaxException if the input is not valid graphql syntax - * @deprecated use {#{@link #parse(ParserEnvironment)}} instead - */ - @DeprecatedAt("2022-08-31") - @Deprecated - public Document parseDocument(String input, String sourceName) throws InvalidSyntaxException { MultiSourceReader multiSourceReader = MultiSourceReader.newMultiSourceReader() - .string(input, sourceName) + .string(input, null) .trackData(true) .build(); - return parseDocument(multiSourceReader); - } - /** - * Parses a string input into a graphql AST {@link Document} - * - * @param input the input to parse - * @param parserOptions the parser options - * - * @return an AST {@link Document} - * - * @throws InvalidSyntaxException if the input is not valid graphql syntax - * @deprecated use {#{@link #parse(ParserEnvironment)}} instead - */ - @DeprecatedAt("2022-08-31") - @Deprecated - public Document parseDocument(String input, ParserOptions parserOptions) throws InvalidSyntaxException { - MultiSourceReader multiSourceReader = MultiSourceReader.newMultiSourceReader() - .string(input, null) - .trackData(true) + ParserEnvironment parserEnvironment = ParserEnvironment.newParserEnvironment() + .document(multiSourceReader) .build(); - return parseDocument(multiSourceReader, parserOptions); + return parseDocumentImpl(parserEnvironment); } 
/** * Parses reader input into a graphql AST {@link Document} * - * @param reader the reader input to parse - * @param parserOptions the parser options + * @param reader the reader input to parse * * @return an AST {@link Document} * * @throws InvalidSyntaxException if the input is not valid graphql syntax - * @deprecated use {#{@link #parse(ParserEnvironment)}} instead */ - @DeprecatedAt("2022-08-31") - @Deprecated - public Document parseDocument(Reader reader, ParserOptions parserOptions) throws InvalidSyntaxException { + public Document parseDocument(Reader reader) throws InvalidSyntaxException { ParserEnvironment parserEnvironment = ParserEnvironment.newParserEnvironment() .document(reader) - .parserOptions(parserOptions) .build(); return parseDocumentImpl(parserEnvironment); } diff --git a/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java b/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java index 266e64c4a5..8c06b874e6 100644 --- a/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java @@ -15,7 +15,6 @@ import java.math.BigDecimal; import java.util.Locale; -import static graphql.Assert.assertNotNull; import static graphql.Assert.assertShouldNeverHappen; import static graphql.scalar.CoercingUtil.i18nMsg; import static graphql.scalar.CoercingUtil.isNumberIsh; @@ -68,13 +67,12 @@ private Boolean serializeImpl(@NotNull Object input, @NotNull Locale locale) { @NotNull private Boolean parseValueImpl(@NotNull Object input, @NotNull Locale locale) { - Boolean result = convertImpl(input); - if (result == null) { + if (!(input instanceof Boolean)) { throw new CoercingParseValueException( - i18nMsg(locale, "Boolean.notBoolean", typeName(input)) + i18nMsg(locale, "Boolean.unexpectedRawValueType", typeName(input)) ); } - return result; + return (Boolean) input; } private static boolean parseLiteralImpl(@NotNull Object input, @NotNull Locale locale) { @@ -88,7 +86,10 @@ private static boolean 
parseLiteralImpl(@NotNull Object input, @NotNull Locale l @NotNull private BooleanValue valueToLiteralImpl(@NotNull Object input, @NotNull Locale locale) { - Boolean result = assertNotNull(convertImpl(input), () -> i18nMsg(locale, "Boolean.notBoolean", typeName(input))); + Boolean result = convertImpl(input); + if (result == null) { + assertShouldNeverHappen(i18nMsg(locale, "Boolean.notBoolean", typeName(input))); + } return BooleanValue.newBooleanValue(result).build(); } @@ -127,7 +128,7 @@ public Boolean parseLiteral(@NotNull Object input) { @Override @Deprecated - public Value valueToLiteral(@NotNull Object input) { + public @NotNull Value valueToLiteral(@NotNull Object input) { return valueToLiteralImpl(input, Locale.getDefault()); } diff --git a/src/main/java/graphql/scalar/GraphqlFloatCoercing.java b/src/main/java/graphql/scalar/GraphqlFloatCoercing.java index 58329e56f3..7efa270274 100644 --- a/src/main/java/graphql/scalar/GraphqlFloatCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlFloatCoercing.java @@ -16,7 +16,7 @@ import java.math.BigDecimal; import java.util.Locale; -import static graphql.Assert.assertNotNull; +import static graphql.Assert.assertShouldNeverHappen; import static graphql.scalar.CoercingUtil.i18nMsg; import static graphql.scalar.CoercingUtil.isNumberIsh; import static graphql.scalar.CoercingUtil.typeName; @@ -65,6 +65,12 @@ private Double serialiseImpl(Object input, @NotNull Locale locale) { @NotNull private Double parseValueImpl(@NotNull Object input, @NotNull Locale locale) { + if (!(input instanceof Number)) { + throw new CoercingParseValueException( + i18nMsg(locale, "Float.unexpectedRawValueType", typeName(input)) + ); + } + Double result = convertImpl(input); if (result == null) { throw new CoercingParseValueException( @@ -89,7 +95,10 @@ private static double parseLiteralImpl(@NotNull Object input, @NotNull Locale lo @NotNull private FloatValue valueToLiteralImpl(Object input, @NotNull Locale locale) { - Double result = 
assertNotNull(convertImpl(input), () -> i18nMsg(locale, "Float.notFloat", typeName(input))); + Double result = convertImpl(input); + if (result == null) { + assertShouldNeverHappen(i18nMsg(locale, "Float.notFloat", typeName(input))); + } return FloatValue.newFloatValue(BigDecimal.valueOf(result)).build(); } diff --git a/src/main/java/graphql/scalar/GraphqlIDCoercing.java b/src/main/java/graphql/scalar/GraphqlIDCoercing.java index 76e78e7917..4631c93c5d 100644 --- a/src/main/java/graphql/scalar/GraphqlIDCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlIDCoercing.java @@ -18,6 +18,7 @@ import java.util.UUID; import static graphql.Assert.assertNotNull; +import static graphql.Assert.assertShouldNeverHappen; import static graphql.scalar.CoercingUtil.i18nMsg; import static graphql.scalar.CoercingUtil.typeName; @@ -84,7 +85,10 @@ private String parseLiteralImpl(Object input, @NotNull Locale locale) { @NotNull private StringValue valueToLiteralImpl(Object input, @NotNull Locale locale) { - String result = assertNotNull(convertImpl(input), () -> i18nMsg(locale, "ID.notId", typeName(input))); + String result = convertImpl(input); + if (result == null) { + assertShouldNeverHappen(i18nMsg(locale, "ID.notId", typeName(input))); + } return StringValue.newStringValue(result).build(); } diff --git a/src/main/java/graphql/scalar/GraphqlIntCoercing.java b/src/main/java/graphql/scalar/GraphqlIntCoercing.java index 99e9eeb84b..cf428fc8cf 100644 --- a/src/main/java/graphql/scalar/GraphqlIntCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlIntCoercing.java @@ -16,7 +16,7 @@ import java.math.BigInteger; import java.util.Locale; -import static graphql.Assert.assertNotNull; +import static graphql.Assert.assertShouldNeverHappen; import static graphql.scalar.CoercingUtil.i18nMsg; import static graphql.scalar.CoercingUtil.isNumberIsh; import static graphql.scalar.CoercingUtil.typeName; @@ -64,15 +64,44 @@ private Integer serialiseImpl(Object input, @NotNull Locale locale) { 
@NotNull private Integer parseValueImpl(@NotNull Object input, @NotNull Locale locale) { - Integer result = convertImpl(input); + if (!(input instanceof Number)) { + throw new CoercingParseValueException( + i18nMsg(locale, "Int.notInt", typeName(input)) + ); + } + if (input instanceof Integer) { + return (Integer) input; + } + + BigInteger result = convertParseValueImpl(input); if (result == null) { throw new CoercingParseValueException( i18nMsg(locale, "Int.notInt", typeName(input)) ); } + if (result.compareTo(INT_MIN) < 0 || result.compareTo(INT_MAX) > 0) { + throw new CoercingParseValueException( + i18nMsg(locale, "Int.outsideRange", result.toString()) + ); + } + return result.intValueExact(); + } - return result; + private BigInteger convertParseValueImpl(Object input) { + BigDecimal value; + try { + value = new BigDecimal(input.toString()); + } catch (NumberFormatException e) { + return null; + } + + try { + return value.toBigIntegerExact(); + } catch (ArithmeticException e) { + // Exception if number has non-zero fractional part + return null; + } } private static int parseLiteralImpl(Object input, @NotNull Locale locale) { @@ -91,7 +120,10 @@ private static int parseLiteralImpl(Object input, @NotNull Locale locale) { } private IntValue valueToLiteralImpl(Object input, @NotNull Locale locale) { - Integer result = assertNotNull(convertImpl(input),() -> i18nMsg(locale, "Int.notInt", typeName(input))); + Integer result = convertImpl(input); + if (result == null) { + assertShouldNeverHappen(i18nMsg(locale, "Int.notInt", typeName(input))); + } return IntValue.newIntValue(BigInteger.valueOf(result)).build(); } @@ -131,7 +163,7 @@ public Integer parseLiteral(@NotNull Object input) { @Override @Deprecated - public Value valueToLiteral(@NotNull Object input) { + public @NotNull Value valueToLiteral(@NotNull Object input) { return valueToLiteralImpl(input, Locale.getDefault()); } diff --git a/src/main/java/graphql/scalar/GraphqlStringCoercing.java 
b/src/main/java/graphql/scalar/GraphqlStringCoercing.java index 9b0d6b84ae..9040cc03f8 100644 --- a/src/main/java/graphql/scalar/GraphqlStringCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlStringCoercing.java @@ -28,6 +28,15 @@ private String toStringImpl(Object input) { return String.valueOf(input); } + private String parseValueImpl(@NotNull Object input, @NotNull Locale locale) { + if (!(input instanceof String)) { + throw new CoercingParseValueException( + i18nMsg(locale, "String.unexpectedRawValueType", typeName(input)) + ); + } + return (String) input; + } + private String parseLiteralImpl(@NotNull Object input, Locale locale) { if (!(input instanceof StringValue)) { throw new CoercingParseLiteralException( @@ -55,12 +64,12 @@ public String serialize(@NotNull Object dataFetcherResult) { @Override @Deprecated public String parseValue(@NotNull Object input) { - return toStringImpl(input); + return parseValueImpl(input, Locale.getDefault()); } @Override public String parseValue(@NotNull Object input, @NotNull GraphQLContext graphQLContext, @NotNull Locale locale) throws CoercingParseValueException { - return toStringImpl(input); + return parseValueImpl(input, locale); } @Override @@ -76,7 +85,7 @@ public String parseLiteral(@NotNull Object input) { @Override @Deprecated - public Value valueToLiteral(@NotNull Object input) { + public @NotNull Value valueToLiteral(@NotNull Object input) { return valueToLiteralImpl(input); } diff --git a/src/main/java/graphql/schema/CodeRegistryVisitor.java b/src/main/java/graphql/schema/CodeRegistryVisitor.java index 50cfa2a492..166ed6239c 100644 --- a/src/main/java/graphql/schema/CodeRegistryVisitor.java +++ b/src/main/java/graphql/schema/CodeRegistryVisitor.java @@ -40,7 +40,7 @@ public TraversalControl visitGraphQLInterfaceType(GraphQLInterfaceType node, Tra codeRegistry.typeResolverIfAbsent(node, typeResolver); } assertTrue(codeRegistry.getTypeResolver(node) != null, - () -> String.format("You MUST provide a type 
resolver for the interface type '%s'", node.getName())); + "You MUST provide a type resolver for the interface type '%s'", node.getName()); return CONTINUE; } @@ -51,7 +51,7 @@ public TraversalControl visitGraphQLUnionType(GraphQLUnionType node, TraverserCo codeRegistry.typeResolverIfAbsent(node, typeResolver); } assertTrue(codeRegistry.getTypeResolver(node) != null, - () -> String.format("You MUST provide a type resolver for the union type '%s'", node.getName())); + "You MUST provide a type resolver for the union type '%s'", node.getName()); return CONTINUE; } } diff --git a/src/main/java/graphql/schema/Coercing.java b/src/main/java/graphql/schema/Coercing.java index 0b4d127b80..cf8de535a5 100644 --- a/src/main/java/graphql/schema/Coercing.java +++ b/src/main/java/graphql/schema/Coercing.java @@ -1,7 +1,6 @@ package graphql.schema; -import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.PublicSpi; import graphql.execution.CoercedVariables; @@ -54,8 +53,7 @@ public interface Coercing { * * @throws graphql.schema.CoercingSerializeException if value input can't be serialized */ - @Deprecated - @DeprecatedAt("2022-08-22") + @Deprecated(since = "2022-08-22") default @Nullable O serialize(@NotNull Object dataFetcherResult) throws CoercingSerializeException { throw new UnsupportedOperationException("The non deprecated version of serialize has not been implemented by this scalar : " + this.getClass()); } @@ -99,8 +97,7 @@ public interface Coercing { * * @throws graphql.schema.CoercingParseValueException if value input can't be parsed */ - @Deprecated - @DeprecatedAt("2022-08-22") + @Deprecated(since = "2022-08-22") default @Nullable I parseValue(@NotNull Object input) throws CoercingParseValueException { throw new UnsupportedOperationException("The non deprecated version of parseValue has not been implemented by this scalar : " + this.getClass()); } @@ -146,8 +143,7 @@ default I parseValue(@NotNull Object input, @NotNull GraphQLContext graphQLConte * * 
@throws graphql.schema.CoercingParseLiteralException if input literal can't be parsed */ - @Deprecated - @DeprecatedAt("2022-08-22") + @Deprecated(since = "2022-08-22") default @Nullable I parseLiteral(@NotNull Object input) throws CoercingParseLiteralException { throw new UnsupportedOperationException("The non deprecated version of parseLiteral has not been implemented by this scalar : " + this.getClass()); } @@ -175,8 +171,7 @@ default I parseValue(@NotNull Object input, @NotNull GraphQLContext graphQLConte * @throws graphql.schema.CoercingParseLiteralException if input literal can't be parsed */ @SuppressWarnings("unused") - @Deprecated - @DeprecatedAt("2022-08-22") + @Deprecated(since = "2022-08-22") default @Nullable I parseLiteral(Object input, Map variables) throws CoercingParseLiteralException { return parseLiteral(input); } @@ -222,8 +217,7 @@ default I parseValue(@NotNull Object input, @NotNull GraphQLContext graphQLConte * * @return The literal matching the external input value. 
*/ - @Deprecated - @DeprecatedAt("2022-08-22") + @Deprecated(since = "2022-08-22") default @NotNull Value valueToLiteral(@NotNull Object input) { throw new UnsupportedOperationException("The non deprecated version of valueToLiteral has not been implemented by this scalar : " + this.getClass()); } diff --git a/src/main/java/graphql/schema/DataFetchingEnvironment.java b/src/main/java/graphql/schema/DataFetchingEnvironment.java index 041d6a9ca2..ce46ad75c9 100644 --- a/src/main/java/graphql/schema/DataFetchingEnvironment.java +++ b/src/main/java/graphql/schema/DataFetchingEnvironment.java @@ -1,6 +1,5 @@ package graphql.schema; -import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.PublicApi; import graphql.execution.ExecutionId; @@ -87,8 +86,7 @@ public interface DataFetchingEnvironment extends IntrospectionDataFetchingEnviro * * @deprecated - use {@link #getGraphQlContext()} instead */ - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") T getContext(); /** @@ -138,8 +136,7 @@ public interface DataFetchingEnvironment extends IntrospectionDataFetchingEnviro * * @deprecated Use {@link #getMergedField()}. 
*/ - @Deprecated - @DeprecatedAt("2018-12-20") + @Deprecated(since = "2018-12-20") List getFields(); /** diff --git a/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java b/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java index dbf9618a43..4377330e48 100644 --- a/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java +++ b/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java @@ -2,7 +2,6 @@ import com.google.common.collect.ImmutableMap; -import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.Internal; import graphql.collect.ImmutableKit; @@ -95,7 +94,7 @@ public static Builder newDataFetchingEnvironment(ExecutionContext executionConte .locale(executionContext.getLocale()) .document(executionContext.getDocument()) .operationDefinition(executionContext.getOperationDefinition()) - .variables(executionContext.getVariables()) + .variables(executionContext.getCoercedVariables().toMap()) .executionId(executionContext.getExecutionId()); } @@ -306,8 +305,7 @@ public Builder arguments(Supplier> arguments) { return this; } - @Deprecated - @DeprecatedAt("2021-07-05") + @Deprecated(since = "2021-07-05") public Builder context(Object context) { this.context = context; return this; diff --git a/src/main/java/graphql/schema/DefaultGraphqlTypeComparatorRegistry.java b/src/main/java/graphql/schema/DefaultGraphqlTypeComparatorRegistry.java index a7c3c42f11..82620bcc1b 100644 --- a/src/main/java/graphql/schema/DefaultGraphqlTypeComparatorRegistry.java +++ b/src/main/java/graphql/schema/DefaultGraphqlTypeComparatorRegistry.java @@ -10,7 +10,7 @@ import java.util.function.UnaryOperator; import static graphql.Assert.assertNotNull; -import static graphql.schema.GraphQLTypeUtil.unwrapAll; +import static graphql.schema.GraphQLTypeUtil.unwrapAllAs; import static graphql.schema.GraphqlTypeComparatorEnvironment.newEnvironment; /** @@ -33,6 +33,7 @@ public class DefaultGraphqlTypeComparatorRegistry implements GraphqlTypeComparat /** * 
This orders the schema into a sensible grouped order + * * @return a comparator that allows for sensible grouped order */ public static Comparator sensibleGroupedOrder() { @@ -51,7 +52,11 @@ public static Comparator sensibleGroupedOrder() { private static GraphQLSchemaElement unwrapElement(GraphQLSchemaElement element) { if (element instanceof GraphQLType) { - element = unwrapAll((GraphQLType) element); + GraphQLType castElement = (GraphQLType) element; + // We need to unwrap as GraphQLType to support GraphQLTypeReferences which is not an GraphQLUnmodifiedType + // as returned by unwrapAll. + castElement = unwrapAllAs(castElement); + element = castElement; } return element; } @@ -59,7 +64,7 @@ private static GraphQLSchemaElement unwrapElement(GraphQLSchemaElement element) private static int compareByName(GraphQLSchemaElement o1, GraphQLSchemaElement o2) { return Comparator.comparing(element -> { if (element instanceof GraphQLType) { - element = unwrapAll((GraphQLType) element); + element = unwrapElement((GraphQLType) element); } if (element instanceof GraphQLNamedSchemaElement) { return ((GraphQLNamedSchemaElement) element).getName(); diff --git a/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java b/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java index ff10e1cdd7..41d6795556 100644 --- a/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java +++ b/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java @@ -1,6 +1,5 @@ package graphql.schema; -import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.PublicApi; import graphql.execution.ExecutionId; @@ -64,8 +63,7 @@ public T getArgumentOrDefault(String name, T defaultValue) { return delegateEnvironment.getArgumentOrDefault(name, defaultValue); } - @Deprecated - @DeprecatedAt("2022-04-17") + @Deprecated(since = "2022-04-17") @Override public T getContext() { return delegateEnvironment.getContext(); @@ -91,8 +89,7 @@ public 
GraphQLFieldDefinition getFieldDefinition() { return delegateEnvironment.getFieldDefinition(); } - @Deprecated - @DeprecatedAt("2019-10-07") + @Deprecated(since = "2019-10-07") @Override public List getFields() { return delegateEnvironment.getFields(); diff --git a/src/main/java/graphql/schema/GraphQLArgument.java b/src/main/java/graphql/schema/GraphQLArgument.java index 4ba3f123ea..5f52f294cb 100644 --- a/src/main/java/graphql/schema/GraphQLArgument.java +++ b/src/main/java/graphql/schema/GraphQLArgument.java @@ -1,7 +1,6 @@ package graphql.schema; -import graphql.DeprecatedAt; import graphql.DirectivesUtil; import graphql.GraphQLContext; import graphql.PublicApi; @@ -83,7 +82,7 @@ private GraphQLArgument(String name, this.value = value; this.definition = definition; this.deprecationReason = deprecationReason; - this.directivesHolder = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); } @@ -125,8 +124,7 @@ public boolean hasSetValue() { * * @deprecated use {@link GraphQLAppliedDirectiveArgument} instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public @NotNull InputValueWithState getArgumentValue() { return value; } @@ -149,8 +147,7 @@ public boolean hasSetValue() { * * @deprecated use {@link GraphQLAppliedDirectiveArgument} instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public static T getArgumentValue(GraphQLArgument argument) { return getInputValueImpl(argument.getType(), argument.getArgumentValue(), GraphQLContext.getDefault(), Locale.getDefault()); } @@ -374,8 +371,7 @@ public Builder type(GraphQLInputType type) { * * @deprecated use {@link #defaultValueLiteral(Value)} or {@link #defaultValueProgrammatic(Object)} */ - @Deprecated - @DeprecatedAt("2021-05-10") + @Deprecated(since = "2021-05-10") public Builder defaultValue(Object defaultValue) { this.defaultValue 
= InputValueWithState.newInternalValue(defaultValue); return this; @@ -420,8 +416,7 @@ public Builder clearDefaultValue() { * * @deprecated use {@link #valueLiteral(Value)} or {@link #valueProgrammatic(Object)} */ - @Deprecated - @DeprecatedAt("2021-05-10") + @Deprecated(since = "2021-05-10") public Builder value(@Nullable Object value) { this.value = InputValueWithState.newInternalValue(value); return this; @@ -436,8 +431,7 @@ public Builder value(@Nullable Object value) { * * @deprecated use {@link GraphQLAppliedDirectiveArgument} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public Builder valueLiteral(@NotNull Value value) { this.value = InputValueWithState.newLiteralValue(value); return this; @@ -450,8 +444,7 @@ public Builder valueLiteral(@NotNull Value value) { * * @deprecated use {@link GraphQLAppliedDirectiveArgument} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public Builder valueProgrammatic(@Nullable Object value) { this.value = InputValueWithState.newExternalValue(value); return this; @@ -464,8 +457,7 @@ public Builder valueProgrammatic(@Nullable Object value) { * * @deprecated use {@link GraphQLAppliedDirectiveArgument} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public Builder clearValue() { this.value = InputValueWithState.NOT_SET; return this; diff --git a/src/main/java/graphql/schema/GraphQLCodeRegistry.java b/src/main/java/graphql/schema/GraphQLCodeRegistry.java index 9620ca19a7..768d13897e 100644 --- a/src/main/java/graphql/schema/GraphQLCodeRegistry.java +++ b/src/main/java/graphql/schema/GraphQLCodeRegistry.java @@ -1,7 +1,6 @@ package graphql.schema; import graphql.Assert; -import graphql.DeprecatedAt; import graphql.Internal; import graphql.PublicApi; import graphql.schema.visibility.GraphqlFieldVisibility; @@ -50,24 +49,6 @@ public GraphqlFieldVisibility getFieldVisibility() { 
return fieldVisibility; } - /** - * Returns a data fetcher associated with a field within a container type - * - * @param parentType the container type - * @param fieldDefinition the field definition - * - * @return the DataFetcher associated with this field. All fields have data fetchers - * - * @see #getDataFetcher(GraphQLObjectType, GraphQLFieldDefinition) - * @deprecated This is confusing because {@link GraphQLInterfaceType}s cant have data fetchers. At runtime only a {@link GraphQLObjectType} - * can be used to fetch a field. This method allows the mapping to be made, but it is never useful if an interface is passed in. - */ - @Deprecated - @DeprecatedAt("2023-05-13") - public DataFetcher getDataFetcher(GraphQLFieldsContainer parentType, GraphQLFieldDefinition fieldDefinition) { - return getDataFetcherImpl(FieldCoordinates.coordinates(parentType, fieldDefinition), fieldDefinition, dataFetcherMap, systemDataFetcherMap, defaultDataFetcherFactory); - } - /** * Returns a data fetcher associated with a field within an object type * @@ -159,7 +140,7 @@ private static TypeResolver getTypeResolverForInterface(GraphQLInterfaceType par if (typeResolver == null) { typeResolver = parentType.getTypeResolver(); } - return assertNotNull(typeResolver, () -> "There must be a type resolver for interface " + parentType.getName()); + return assertNotNull(typeResolver, "There must be a type resolver for interface %s", parentType.getName()); } private static TypeResolver getTypeResolverForUnion(GraphQLUnionType parentType, Map typeResolverMap) { @@ -168,7 +149,7 @@ private static TypeResolver getTypeResolverForUnion(GraphQLUnionType parentType, if (typeResolver == null) { typeResolver = parentType.getTypeResolver(); } - return assertNotNull(typeResolver, () -> "There must be a type resolver for union " + parentType.getName()); + return assertNotNull(typeResolver, "There must be a type resolver for union %s",parentType.getName()); } /** @@ -254,24 +235,6 @@ private Builder 
markChanged(boolean condition) { return this; } - /** - * Returns a data fetcher associated with a field within a container type - * - * @param parentType the container type - * @param fieldDefinition the field definition - * - * @return the DataFetcher associated with this field. All fields have data fetchers - * - * @see #getDataFetcher(GraphQLObjectType, GraphQLFieldDefinition) - * @deprecated This is confusing because {@link GraphQLInterfaceType}s cant have data fetchers. At runtime only a {@link GraphQLObjectType} - * can be used to fetch a field. This method allows the mapping to be made, but it is never useful if an interface is passed in. - */ - @Deprecated - @DeprecatedAt("2023-05-13") - public DataFetcher getDataFetcher(GraphQLFieldsContainer parentType, GraphQLFieldDefinition fieldDefinition) { - return getDataFetcherImpl(FieldCoordinates.coordinates(parentType, fieldDefinition), fieldDefinition, dataFetcherMap, systemDataFetcherMap, defaultDataFetcherFactory); - } - /** * Returns a data fetcher associated with a field within an object type * @@ -360,26 +323,6 @@ public Builder dataFetcher(FieldCoordinates coordinates, DataFetcher dataFetc return dataFetcher(assertNotNull(coordinates), DataFetcherFactories.useDataFetcher(dataFetcher)); } - /** - * Sets the data fetcher for a specific field inside a container type - * - * @param parentType the container type - * @param fieldDefinition the field definition - * @param dataFetcher the data fetcher code for that field - * - * @return this builder - * - * @see #dataFetcher(GraphQLObjectType, GraphQLFieldDefinition, DataFetcher) - * @deprecated This is confusing because {@link GraphQLInterfaceType}s cant have data fetchers. At runtime only a {@link GraphQLObjectType} - * can be used to fetch a field. This method allows the mapping to be made, but it is never useful if an interface is passed in. 
- */ - @Deprecated - @DeprecatedAt("2023-05-13") - public Builder dataFetcher(GraphQLFieldsContainer parentType, GraphQLFieldDefinition fieldDefinition, DataFetcher dataFetcher) { - return dataFetcher(FieldCoordinates.coordinates(parentType.getName(), fieldDefinition.getName()), dataFetcher); - } - - /** * Sets the data fetcher for a specific field inside an object type * diff --git a/src/main/java/graphql/schema/GraphQLDirective.java b/src/main/java/graphql/schema/GraphQLDirective.java index 1dbc41042c..9037482f21 100644 --- a/src/main/java/graphql/schema/GraphQLDirective.java +++ b/src/main/java/graphql/schema/GraphQLDirective.java @@ -15,6 +15,7 @@ import java.util.function.Consumer; import java.util.function.UnaryOperator; +import static graphql.Assert.assertNotEmpty; import static graphql.Assert.assertNotNull; import static graphql.Assert.assertValidName; import static graphql.introspection.Introspection.DirectiveLocation; @@ -52,6 +53,7 @@ private GraphQLDirective(String name, DirectiveDefinition definition) { assertValidName(name); assertNotNull(arguments, () -> "arguments can't be null"); + assertNotEmpty(locations, () -> "locations can't be empty"); this.name = name; this.description = description; this.repeatable = repeatable; diff --git a/src/main/java/graphql/schema/GraphQLDirectiveContainer.java b/src/main/java/graphql/schema/GraphQLDirectiveContainer.java index 52caebef29..4bee55aa36 100644 --- a/src/main/java/graphql/schema/GraphQLDirectiveContainer.java +++ b/src/main/java/graphql/schema/GraphQLDirectiveContainer.java @@ -1,6 +1,5 @@ package graphql.schema; -import graphql.DeprecatedAt; import graphql.PublicApi; import java.util.List; @@ -76,8 +75,7 @@ default List getAppliedDirectives(String directiveName) * * @deprecated use {@link #hasAppliedDirective(String)} instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") default boolean hasDirective(String directiveName) { return 
getAllDirectivesByName().containsKey(directiveName); } @@ -101,8 +99,7 @@ default boolean hasAppliedDirective(String directiveName) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") List getDirectives(); /** @@ -113,8 +110,7 @@ default boolean hasAppliedDirective(String directiveName) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") Map getDirectivesByName(); /** @@ -125,8 +121,7 @@ default boolean hasAppliedDirective(String directiveName) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") Map> getAllDirectivesByName(); /** @@ -139,8 +134,7 @@ default boolean hasAppliedDirective(String directiveName) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") GraphQLDirective getDirective(String directiveName); /** @@ -152,8 +146,7 @@ default boolean hasAppliedDirective(String directiveName) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") default List getDirectives(String directiveName) { return getAllDirectivesByName().getOrDefault(directiveName, emptyList()); } diff --git a/src/main/java/graphql/schema/GraphQLEnumType.java b/src/main/java/graphql/schema/GraphQLEnumType.java index 00ced1b451..78d08fa900 100644 --- a/src/main/java/graphql/schema/GraphQLEnumType.java +++ b/src/main/java/graphql/schema/GraphQLEnumType.java @@ -66,7 +66,7 @@ private GraphQLEnumType(String name, this.description = description; this.definition = definition; this.extensionDefinitions = ImmutableList.copyOf(extensionDefinitions); - this.directivesHolder = new 
DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); this.valueDefinitionMap = buildMap(values); } @@ -130,7 +130,9 @@ public Value valueToLiteral(Object input) { @Internal public Value valueToLiteral(Object input, GraphQLContext graphQLContext, Locale locale) { GraphQLEnumValueDefinition enumValueDefinition = valueDefinitionMap.get(input.toString()); - assertNotNull(enumValueDefinition, () -> i18nMsg(locale, "Enum.badName", name, input.toString())); + if (enumValueDefinition == null) { + assertShouldNeverHappen(i18nMsg(locale, "Enum.badName", name, input.toString())); + }; return EnumValue.newEnumValue(enumValueDefinition.getName()).build(); } diff --git a/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java b/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java index 4d34417773..4aba114c7b 100644 --- a/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java +++ b/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java @@ -48,7 +48,7 @@ private GraphQLEnumValueDefinition(String name, this.description = description; this.value = value; this.deprecationReason = deprecationReason; - this.directivesHolder = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); this.definition = definition; } diff --git a/src/main/java/graphql/schema/GraphQLFieldDefinition.java b/src/main/java/graphql/schema/GraphQLFieldDefinition.java index 79f824997d..991da1793d 100644 --- a/src/main/java/graphql/schema/GraphQLFieldDefinition.java +++ b/src/main/java/graphql/schema/GraphQLFieldDefinition.java @@ -2,11 +2,11 @@ import com.google.common.collect.ImmutableList; -import graphql.DeprecatedAt; import graphql.DirectivesUtil; import graphql.Internal; import graphql.PublicApi; import graphql.language.FieldDefinition; +import graphql.util.Interning; 
import graphql.util.TraversalControl; import graphql.util.TraverserContext; @@ -62,12 +62,12 @@ private GraphQLFieldDefinition(String name, assertValidName(name); assertNotNull(type, () -> "type can't be null"); assertNotNull(arguments, () -> "arguments can't be null"); - this.name = name; + this.name = Interning.intern(name); this.description = description; this.originalType = type; this.dataFetcherFactory = dataFetcherFactory; this.arguments = ImmutableList.copyOf(arguments); - this.directivesHolder = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); this.deprecationReason = deprecationReason; this.definition = definition; } @@ -88,8 +88,7 @@ public GraphQLOutputType getType() { // to be removed in a future version when all code is in the code registry @Internal - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") DataFetcher getDataFetcher() { if (dataFetcherFactory == null) { return null; @@ -310,8 +309,7 @@ public Builder type(GraphQLOutputType type) { * * @deprecated use {@link graphql.schema.GraphQLCodeRegistry} instead */ - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") public Builder dataFetcher(DataFetcher dataFetcher) { assertNotNull(dataFetcher, () -> "dataFetcher must be not null"); this.dataFetcherFactory = DataFetcherFactories.useDataFetcher(dataFetcher); @@ -327,8 +325,7 @@ public Builder dataFetcher(DataFetcher dataFetcher) { * * @deprecated use {@link graphql.schema.GraphQLCodeRegistry} instead */ - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") public Builder dataFetcherFactory(DataFetcherFactory dataFetcherFactory) { assertNotNull(dataFetcherFactory, () -> "dataFetcherFactory must be not null"); this.dataFetcherFactory = dataFetcherFactory; @@ -344,8 +341,7 @@ public Builder dataFetcherFactory(DataFetcherFactory dataFetcherFactory) { * * 
@deprecated use {@link graphql.schema.GraphQLCodeRegistry} instead */ - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") public Builder staticValue(final Object value) { this.dataFetcherFactory = DataFetcherFactories.useDataFetcher(environment -> value); return this; @@ -398,8 +394,7 @@ public Builder argument(GraphQLArgument.Builder builder) { * * @deprecated This is a badly named method and is replaced by {@link #arguments(java.util.List)} */ - @Deprecated - @DeprecatedAt("2019-02-06") + @Deprecated(since = "2019-02-06") public Builder argument(List arguments) { return arguments(arguments); } diff --git a/src/main/java/graphql/schema/GraphQLInputObjectField.java b/src/main/java/graphql/schema/GraphQLInputObjectField.java index fda458defc..60e680fb30 100644 --- a/src/main/java/graphql/schema/GraphQLInputObjectField.java +++ b/src/main/java/graphql/schema/GraphQLInputObjectField.java @@ -1,7 +1,6 @@ package graphql.schema; -import graphql.DeprecatedAt; import graphql.DirectivesUtil; import graphql.GraphQLContext; import graphql.PublicApi; @@ -63,7 +62,7 @@ private GraphQLInputObjectField( this.originalType = type; this.defaultValue = defaultValue; this.description = description; - this.directivesHolder = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); this.definition = definition; this.deprecationReason = deprecationReason; } @@ -315,8 +314,7 @@ public Builder type(GraphQLInputType type) { * * @deprecated use {@link #defaultValueLiteral(Value)} */ - @Deprecated - @DeprecatedAt("2021-05-10") + @Deprecated(since = "2021-05-10") public Builder defaultValue(Object defaultValue) { this.defaultValue = InputValueWithState.newInternalValue(defaultValue); return this; diff --git a/src/main/java/graphql/schema/GraphQLInputObjectType.java b/src/main/java/graphql/schema/GraphQLInputObjectType.java index 81b0d63160..9929f93493 100644 
--- a/src/main/java/graphql/schema/GraphQLInputObjectType.java +++ b/src/main/java/graphql/schema/GraphQLInputObjectType.java @@ -61,7 +61,7 @@ private GraphQLInputObjectType(String name, this.description = description; this.definition = definition; this.extensionDefinitions = ImmutableList.copyOf(extensionDefinitions); - this.directives = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directives = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); this.fieldMap = buildDefinitionMap(fields); this.isOneOf = hasOneOf(directives, appliedDirectives); } diff --git a/src/main/java/graphql/schema/GraphQLInterfaceType.java b/src/main/java/graphql/schema/GraphQLInterfaceType.java index 814609a7c5..5dff23c40a 100644 --- a/src/main/java/graphql/schema/GraphQLInterfaceType.java +++ b/src/main/java/graphql/schema/GraphQLInterfaceType.java @@ -2,13 +2,12 @@ import com.google.common.collect.ImmutableList; import graphql.Assert; -import graphql.AssertException; -import graphql.DeprecatedAt; import graphql.DirectivesUtil; import graphql.Internal; import graphql.PublicApi; import graphql.language.InterfaceTypeDefinition; import graphql.language.InterfaceTypeExtensionDefinition; +import graphql.util.FpKit; import graphql.util.TraversalControl; import graphql.util.TraverserContext; @@ -21,12 +20,12 @@ import java.util.function.UnaryOperator; import static graphql.Assert.assertNotNull; +import static graphql.Assert.assertShouldNeverHappen; import static graphql.Assert.assertValidName; import static graphql.collect.ImmutableKit.emptyList; import static graphql.schema.GraphqlTypeComparators.sortTypes; import static graphql.util.FpKit.getByName; import static graphql.util.FpKit.valuesToList; -import static java.lang.String.format; /** * In graphql, an interface is an abstract type that defines the set of fields that a type must include to @@ -42,7 +41,7 @@ public class GraphQLInterfaceType implements GraphQLNamedType, GraphQLCompositeT 
private final String name; private final String description; - private final Map fieldDefinitionsByName = new LinkedHashMap<>(); + private final Map fieldDefinitionsByName; private final TypeResolver typeResolver; private final InterfaceTypeDefinition definition; private final ImmutableList extensionDefinitions; @@ -52,7 +51,6 @@ public class GraphQLInterfaceType implements GraphQLNamedType, GraphQLCompositeT private final Comparator interfaceComparator; private ImmutableList replacedInterfaces; - public static final String CHILD_FIELD_DEFINITIONS = "fieldDefinitions"; public static final String CHILD_INTERFACES = "interfaces"; @@ -78,18 +76,13 @@ private GraphQLInterfaceType(String name, this.interfaceComparator = interfaceComparator; this.originalInterfaces = ImmutableList.copyOf(sortTypes(interfaceComparator, interfaces)); this.extensionDefinitions = ImmutableList.copyOf(extensionDefinitions); - this.directivesHolder = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); - buildDefinitionMap(fieldDefinitions); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); + this.fieldDefinitionsByName = buildDefinitionMap(fieldDefinitions); } - private void buildDefinitionMap(List fieldDefinitions) { - for (GraphQLFieldDefinition fieldDefinition : fieldDefinitions) { - String name = fieldDefinition.getName(); - if (fieldDefinitionsByName.containsKey(name)) { - throw new AssertException(format("Duplicated definition for field '%s' in interface '%s'", name, this.name)); - } - fieldDefinitionsByName.put(name, fieldDefinition); - } + private Map buildDefinitionMap(List fieldDefinitions) { + return FpKit.getByName(fieldDefinitions, GraphQLFieldDefinition::getName, + (fld1, fld2) -> assertShouldNeverHappen("Duplicated definition for field '%s' in interface '%s'", fld1.getName(), this.name)); } @Override @@ -97,7 +90,6 @@ public GraphQLFieldDefinition getFieldDefinition(String name) { return 
fieldDefinitionsByName.get(name); } - @Override public List getFieldDefinitions() { return ImmutableList.copyOf(fieldDefinitionsByName.values()); @@ -114,8 +106,7 @@ public String getDescription() { // to be removed in a future version when all code is in the code registry @Internal - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") TypeResolver getTypeResolver() { return typeResolver; } @@ -231,9 +222,9 @@ public GraphQLInterfaceType withNewChildren(SchemaElementChildrenContainer newCh @Override public List getInterfaces() { if (replacedInterfaces != null) { - return ImmutableList.copyOf(replacedInterfaces); + return replacedInterfaces; } - return ImmutableList.copyOf(originalInterfaces); + return originalInterfaces; } void replaceInterfaces(List interfaces) { @@ -370,8 +361,7 @@ public Builder clearFields() { * * @deprecated use {@link graphql.schema.GraphQLCodeRegistry.Builder#typeResolver(GraphQLInterfaceType, TypeResolver)} instead */ - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") public Builder typeResolver(TypeResolver typeResolver) { this.typeResolver = typeResolver; return this; diff --git a/src/main/java/graphql/schema/GraphQLNonNull.java b/src/main/java/graphql/schema/GraphQLNonNull.java index 6de1ba61d3..12f131b428 100644 --- a/src/main/java/graphql/schema/GraphQLNonNull.java +++ b/src/main/java/graphql/schema/GraphQLNonNull.java @@ -46,8 +46,8 @@ public GraphQLNonNull(GraphQLType wrappedType) { } private void assertNonNullWrapping(GraphQLType wrappedType) { - assertTrue(!GraphQLTypeUtil.isNonNull(wrappedType), () -> - String.format("A non null type cannot wrap an existing non null type '%s'", GraphQLTypeUtil.simplePrint(wrappedType))); + assertTrue(!GraphQLTypeUtil.isNonNull(wrappedType), + "A non null type cannot wrap an existing non null type '%s'", GraphQLTypeUtil.simplePrint(wrappedType)); } @Override diff --git a/src/main/java/graphql/schema/GraphQLObjectType.java 
b/src/main/java/graphql/schema/GraphQLObjectType.java index 732c0ff753..a1bdcdfa22 100644 --- a/src/main/java/graphql/schema/GraphQLObjectType.java +++ b/src/main/java/graphql/schema/GraphQLObjectType.java @@ -40,11 +40,10 @@ @PublicApi public class GraphQLObjectType implements GraphQLNamedOutputType, GraphQLCompositeType, GraphQLUnmodifiedType, GraphQLNullableType, GraphQLDirectiveContainer, GraphQLImplementingType { - private final String name; private final String description; private final Comparator interfaceComparator; - private final ImmutableMap fieldDefinitionsByName; + private final Map fieldDefinitionsByName; private final ImmutableList originalInterfaces; private final DirectivesUtil.DirectivesHolder directivesHolder; private final ObjectTypeDefinition definition; @@ -74,7 +73,7 @@ private GraphQLObjectType(String name, this.originalInterfaces = ImmutableList.copyOf(sortTypes(interfaceComparator, interfaces)); this.definition = definition; this.extensionDefinitions = ImmutableList.copyOf(extensionDefinitions); - this.directivesHolder = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); this.fieldDefinitionsByName = buildDefinitionMap(fieldDefinitions); } @@ -82,9 +81,9 @@ void replaceInterfaces(List interfaces) { this.replacedInterfaces = ImmutableList.copyOf(sortTypes(interfaceComparator, interfaces)); } - private ImmutableMap buildDefinitionMap(List fieldDefinitions) { - return ImmutableMap.copyOf(FpKit.getByName(fieldDefinitions, GraphQLFieldDefinition::getName, - (fld1, fld2) -> assertShouldNeverHappen("Duplicated definition for field '%s' in type '%s'", fld1.getName(), this.name))); + private Map buildDefinitionMap(List fieldDefinitions) { + return FpKit.getByName(fieldDefinitions, GraphQLFieldDefinition::getName, + (fld1, fld2) -> assertShouldNeverHappen("Duplicated definition for field '%s' in type '%s'", fld1.getName(), this.name)); 
} @Override @@ -132,7 +131,6 @@ public List getFieldDefinitions() { return ImmutableList.copyOf(fieldDefinitionsByName.values()); } - @Override public List getInterfaces() { if (replacedInterfaces != null) { diff --git a/src/main/java/graphql/schema/GraphQLScalarType.java b/src/main/java/graphql/schema/GraphQLScalarType.java index 137a6047c0..fccfb0013b 100644 --- a/src/main/java/graphql/schema/GraphQLScalarType.java +++ b/src/main/java/graphql/schema/GraphQLScalarType.java @@ -64,7 +64,7 @@ private GraphQLScalarType(String name, this.description = description; this.coercing = coercing; this.definition = definition; - this.directivesHolder = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directivesHolder = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); this.extensionDefinitions = ImmutableList.copyOf(extensionDefinitions); this.specifiedByUrl = specifiedByUrl; } diff --git a/src/main/java/graphql/schema/GraphQLSchema.java b/src/main/java/graphql/schema/GraphQLSchema.java index 335c694af6..a24928869c 100644 --- a/src/main/java/graphql/schema/GraphQLSchema.java +++ b/src/main/java/graphql/schema/GraphQLSchema.java @@ -5,7 +5,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import graphql.Assert; -import graphql.DeprecatedAt; import graphql.Directives; import graphql.DirectivesUtil; import graphql.Internal; @@ -19,7 +18,6 @@ import graphql.schema.validation.InvalidSchemaException; import graphql.schema.validation.SchemaValidationError; import graphql.schema.validation.SchemaValidator; -import graphql.schema.visibility.GraphqlFieldVisibility; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -72,7 +70,7 @@ public class GraphQLSchema { private final ImmutableMap> interfaceNameToObjectTypeNames; /* - * This constructs partial GraphQL schema object which has has the schema (query / mutation / subscription) trees + * This constructs 
partial GraphQL schema object which has the schema (query / mutation / subscription) trees * in it but it does not have the collected types, code registry nor the type references replaced * * But it can be traversed to discover all that and filled out later via another constructor. @@ -105,7 +103,7 @@ private GraphQLSchema(Builder builder) { } /* - * This constructs a full fledged graphql schema object that has not yet had its type references replaced + * This constructs a fully fledged graphql schema object that has not yet had its type references replaced * but it's otherwise complete */ @Internal @@ -249,14 +247,14 @@ public Set getAdditionalTypes() { public List getTypes(Collection typeNames) { ImmutableList.Builder builder = ImmutableList.builder(); for (String typeName : typeNames) { - builder.add((T) assertNotNull(typeMap.get(typeName), () -> String.format("No type found for name %s", typeName))); + builder.add((T) assertNotNull(typeMap.get(typeName), "No type found for name %s", typeName)); } return builder.build(); } /** * Gets the named type from the schema or null if it's not present. - * + *

* Warning - you are inviting class cast errors if the types are not what you expect. * * @param typeName the name of the type to retrieve @@ -287,13 +285,13 @@ public boolean containsType(String typeName) { * * @return a graphql object type or null if there is one * - * @throws graphql.GraphQLException if the type is NOT a object type + * @throws graphql.GraphQLException if the type is NOT an object type */ public GraphQLObjectType getObjectType(String typeName) { GraphQLType graphQLType = typeMap.get(typeName); if (graphQLType != null) { assertTrue(graphQLType instanceof GraphQLObjectType, - () -> String.format("You have asked for named object type '%s' but it's not an object type but rather a '%s'", typeName, graphQLType.getClass().getName())); + "You have asked for named object type '%s' but it's not an object type but rather a '%s'", typeName, graphQLType.getClass().getName()); } return (GraphQLObjectType) graphQLType; } @@ -324,7 +322,7 @@ public GraphQLFieldDefinition getFieldDefinition(FieldCoordinates fieldCoordinat GraphQLType graphQLType = getType(typeName); if (graphQLType != null) { assertTrue(graphQLType instanceof GraphQLFieldsContainer, - () -> String.format("You have asked for named type '%s' but it's not GraphQLFieldsContainer but rather a '%s'", typeName, graphQLType.getClass().getName())); + "You have asked for named type '%s' but it's not GraphQLFieldsContainer but rather a '%s'", typeName, graphQLType.getClass().getName()); return ((GraphQLFieldsContainer) graphQLType).getFieldDefinition(fieldName); } return null; @@ -413,17 +411,6 @@ public GraphQLObjectType getSubscriptionType() { return subscriptionType; } - /** - * @return the field visibility - * - * @deprecated use {@link GraphQLCodeRegistry#getFieldVisibility()} instead - */ - @Deprecated - @DeprecatedAt("2018-12-03") - public GraphqlFieldVisibility getFieldVisibility() { - return codeRegistry.getFieldVisibility(); - } - /** * This returns the list of directives definitions that are 
associated with this schema object including * built in ones. @@ -435,14 +422,14 @@ public List getDirectives() { } /** - * @return a map of non repeatable directives by directive name + * @return a map of non-repeatable directives by directive name */ public Map getDirectivesByName() { return directiveDefinitionsHolder.getDirectivesByName(); } /** - * Returns a named directive that (for legacy reasons) will be only in the set of non repeatable directives + * Returns a named directive that (for legacy reasons) will be only in the set of non-repeatable directives * * @param directiveName the name of the directive to retrieve * @@ -453,7 +440,6 @@ public GraphQLDirective getDirective(String directiveName) { } - /** * This returns the list of directives that have been explicitly applied to the * schema object. Note that {@link #getDirectives()} will return @@ -464,8 +450,7 @@ public GraphQLDirective getDirective(String directiveName) { * * @deprecated Use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public List getSchemaDirectives() { return schemaAppliedDirectivesHolder.getDirectives(); } @@ -480,8 +465,7 @@ public List getSchemaDirectives() { * * @deprecated Use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public Map getSchemaDirectiveByName() { return schemaAppliedDirectivesHolder.getDirectivesByName(); } @@ -496,8 +480,7 @@ public Map getSchemaDirectiveByName() { * * @deprecated Use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public Map> getAllSchemaDirectivesByName() { return schemaAppliedDirectivesHolder.getAllDirectivesByName(); } @@ -514,8 +497,7 @@ public Map> getAllSchemaDirectivesByName() { * * @deprecated Use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - 
@DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public GraphQLDirective getSchemaDirective(String directiveName) { return schemaAppliedDirectivesHolder.getDirective(directiveName); } @@ -530,8 +512,7 @@ public GraphQLDirective getSchemaDirective(String directiveName) { * * @deprecated Use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public List getSchemaDirectives(String directiveName) { return schemaAppliedDirectivesHolder.getDirectives(directiveName); } @@ -542,7 +523,7 @@ public List getSchemaDirectives(String directiveName) { * directives for all schema elements, whereas this is just for the schema * element itself * - * @return a map of directives + * @return a list of directives */ public List getSchemaAppliedDirectives() { return schemaAppliedDirectivesHolder.getAppliedDirectives(); @@ -648,6 +629,7 @@ public static Builder newSchema(GraphQLSchema existingSchema) { .query(existingSchema.getQueryType()) .mutation(existingSchema.getMutationType()) .subscription(existingSchema.getSubscriptionType()) + .extensionDefinitions(existingSchema.getExtensionDefinitions()) .introspectionSchemaType(existingSchema.getIntrospectionSchemaType()) .codeRegistry(existingSchema.getCodeRegistry()) .clearAdditionalTypes() @@ -735,20 +717,6 @@ public Builder subscription(GraphQLObjectType subscriptionType) { return this; } - /** - * @param fieldVisibility the field visibility - * - * @return this builder - * - * @deprecated use {@link graphql.schema.GraphQLCodeRegistry.Builder#fieldVisibility(graphql.schema.visibility.GraphqlFieldVisibility)} instead - */ - @Deprecated - @DeprecatedAt("2018-12-03") - public Builder fieldVisibility(GraphqlFieldVisibility fieldVisibility) { - this.codeRegistry = this.codeRegistry.transform(builder -> builder.fieldVisibility(fieldVisibility)); - return this; - } - public Builder codeRegistry(GraphQLCodeRegistry codeRegistry) { this.codeRegistry = 
codeRegistry; return this; @@ -864,37 +832,6 @@ public Builder introspectionSchemaType(GraphQLObjectType introspectionSchemaType return this; } - /** - * Builds the schema - * - * @param additionalTypes - please don't use this anymore - * - * @return the built schema - * - * @deprecated - Use the {@link #additionalType(GraphQLType)} methods - */ - @Deprecated - @DeprecatedAt("2018-07-30") - public GraphQLSchema build(Set additionalTypes) { - return additionalTypes(additionalTypes).build(); - } - - /** - * Builds the schema - * - * @param additionalTypes - please don't use this any more - * @param additionalDirectives - please don't use this any more - * - * @return the built schema - * - * @deprecated - Use the {@link #additionalType(GraphQLType)} and {@link #additionalDirective(GraphQLDirective)} methods - */ - @Deprecated - @DeprecatedAt("2018-07-30") - public GraphQLSchema build(Set additionalTypes, Set additionalDirectives) { - return additionalTypes(additionalTypes).additionalDirectives(additionalDirectives).build(); - } - /** * Builds the schema * @@ -944,7 +881,7 @@ private GraphQLSchema buildImpl() { private GraphQLSchema validateSchema(GraphQLSchema graphQLSchema) { Collection errors = new SchemaValidator().validateSchema(graphQLSchema); - if (errors.size() > 0) { + if (!errors.isEmpty()) { throw new InvalidSchemaException(errors); } return graphQLSchema; diff --git a/src/main/java/graphql/schema/GraphQLTypeResolvingVisitor.java b/src/main/java/graphql/schema/GraphQLTypeResolvingVisitor.java index 0380ac5cd1..37a57f8933 100644 --- a/src/main/java/graphql/schema/GraphQLTypeResolvingVisitor.java +++ b/src/main/java/graphql/schema/GraphQLTypeResolvingVisitor.java @@ -46,7 +46,7 @@ public TraversalControl visitGraphQLTypeReference(GraphQLTypeReference node, Tra public TraversalControl handleTypeReference(GraphQLTypeReference node, TraverserContext context) { final GraphQLType resolvedType = typeMap.get(node.getName()); - assertNotNull(resolvedType, () -> 
String.format("type %s not found in schema", node.getName())); + assertNotNull(resolvedType, "type %s not found in schema", node.getName()); context.getParentContext().thisNode().accept(context, new TypeRefResolvingVisitor(resolvedType)); return CONTINUE; } diff --git a/src/main/java/graphql/schema/GraphQLTypeUtil.java b/src/main/java/graphql/schema/GraphQLTypeUtil.java index 1306ce2448..c2a44d641c 100644 --- a/src/main/java/graphql/schema/GraphQLTypeUtil.java +++ b/src/main/java/graphql/schema/GraphQLTypeUtil.java @@ -27,18 +27,12 @@ public class GraphQLTypeUtil { */ public static String simplePrint(GraphQLType type) { Assert.assertNotNull(type, () -> "type can't be null"); - StringBuilder sb = new StringBuilder(); if (isNonNull(type)) { - sb.append(simplePrint(unwrapOne(type))); - sb.append("!"); + return simplePrint(unwrapOne(type)) + "!"; } else if (isList(type)) { - sb.append("["); - sb.append(simplePrint(unwrapOne(type))); - sb.append("]"); - } else { - sb.append(((GraphQLNamedType) type).getName()); + return "[" + simplePrint(unwrapOne(type)) + "]"; } - return sb.toString(); + return ((GraphQLNamedType) type).getName(); } public static String simplePrint(GraphQLSchemaElement schemaElement) { @@ -190,6 +184,7 @@ public static T unwrapOneAs(GraphQLType type) { /** * Unwraps all layers of the type or just returns the type again if it's not a wrapped type + * NOTE: This method does not support GraphQLTypeReference as input and will lead to a ClassCastException * * @param type the type to unwrapOne * diff --git a/src/main/java/graphql/schema/GraphQLUnionType.java b/src/main/java/graphql/schema/GraphQLUnionType.java index 452fcd2da9..9af647c6c4 100644 --- a/src/main/java/graphql/schema/GraphQLUnionType.java +++ b/src/main/java/graphql/schema/GraphQLUnionType.java @@ -3,7 +3,6 @@ import com.google.common.collect.ImmutableList; import graphql.Assert; -import graphql.DeprecatedAt; import graphql.DirectivesUtil; import graphql.Internal; import graphql.PublicApi; @@ 
-70,7 +69,7 @@ private GraphQLUnionType(String name, this.typeResolver = typeResolver; this.definition = definition; this.extensionDefinitions = ImmutableList.copyOf(extensionDefinitions); - this.directives = new DirectivesUtil.DirectivesHolder(directives, appliedDirectives); + this.directives = DirectivesUtil.DirectivesHolder.create(directives, appliedDirectives); } void replaceTypes(List types) { @@ -102,8 +101,7 @@ public boolean isPossibleType(GraphQLObjectType graphQLObjectType) { // to be removed in a future version when all code is in the code registry @Internal - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") TypeResolver getTypeResolver() { return typeResolver; } @@ -282,14 +280,12 @@ public Builder extensionDefinitions(List extension * * @deprecated use {@link graphql.schema.GraphQLCodeRegistry.Builder#typeResolver(GraphQLUnionType, TypeResolver)} instead */ - @Deprecated - @DeprecatedAt("2018-12-03") + @Deprecated(since = "2018-12-03") public Builder typeResolver(TypeResolver typeResolver) { this.typeResolver = typeResolver; return this; } - public Builder possibleType(GraphQLObjectType type) { assertNotNull(type, () -> "possible type can't be null"); types.put(type.getName(), type); diff --git a/src/main/java/graphql/schema/GraphqlDirectivesContainerTypeBuilder.java b/src/main/java/graphql/schema/GraphqlDirectivesContainerTypeBuilder.java index b42d2eb03a..b9db03ebb0 100644 --- a/src/main/java/graphql/schema/GraphqlDirectivesContainerTypeBuilder.java +++ b/src/main/java/graphql/schema/GraphqlDirectivesContainerTypeBuilder.java @@ -1,6 +1,5 @@ package graphql.schema; -import graphql.DeprecatedAt; import graphql.Internal; import java.util.ArrayList; @@ -50,8 +49,7 @@ public B withAppliedDirective(GraphQLAppliedDirective.Builder builder) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public B replaceDirectives(List 
directives) { assertNotNull(directives, () -> "directive can't be null"); this.directives.clear(); @@ -66,8 +64,7 @@ public B replaceDirectives(List directives) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public B withDirectives(GraphQLDirective... directives) { assertNotNull(directives, () -> "directives can't be null"); this.directives.clear(); @@ -84,8 +81,7 @@ public B withDirectives(GraphQLDirective... directives) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public B withDirective(GraphQLDirective directive) { assertNotNull(directive, () -> "directive can't be null"); this.directives.add(directive); @@ -99,8 +95,7 @@ public B withDirective(GraphQLDirective directive) { * * @deprecated - use the {@link GraphQLAppliedDirective} methods instead */ - @Deprecated - @DeprecatedAt("2022-02-24") + @Deprecated(since = "2022-02-24") public B withDirective(GraphQLDirective.Builder builder) { return withDirective(builder.build()); } diff --git a/src/main/java/graphql/schema/PropertyDataFetcherHelper.java b/src/main/java/graphql/schema/PropertyDataFetcherHelper.java index 2c38b5e127..0877d93688 100644 --- a/src/main/java/graphql/schema/PropertyDataFetcherHelper.java +++ b/src/main/java/graphql/schema/PropertyDataFetcherHelper.java @@ -13,13 +13,14 @@ public class PropertyDataFetcherHelper { private static final PropertyFetchingImpl impl = new PropertyFetchingImpl(DataFetchingEnvironment.class); + private static final Supplier ALWAYS_NULL = () -> null; public static Object getPropertyValue(String propertyName, Object object, GraphQLType graphQLType) { - return impl.getPropertyValue(propertyName, object, graphQLType, false, () -> null); + return impl.getPropertyValue(propertyName, object, graphQLType, false, ALWAYS_NULL); } public static Object 
getPropertyValue(String propertyName, Object object, GraphQLType graphQLType, Supplier environment) { - return impl.getPropertyValue(propertyName, object, graphQLType, true, environment::get); + return impl.getPropertyValue(propertyName, object, graphQLType, true, environment); } public static void clearReflectionCache() { diff --git a/src/main/java/graphql/schema/PropertyFetchingImpl.java b/src/main/java/graphql/schema/PropertyFetchingImpl.java index da8b153e3d..6126159cf0 100644 --- a/src/main/java/graphql/schema/PropertyFetchingImpl.java +++ b/src/main/java/graphql/schema/PropertyFetchingImpl.java @@ -3,6 +3,8 @@ import graphql.GraphQLException; import graphql.Internal; import graphql.schema.fetching.LambdaFetchingSupport; +import graphql.util.EscapeUtil; +import graphql.util.StringKit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -12,6 +14,7 @@ import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.Comparator; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -32,8 +35,6 @@ */ @Internal public class PropertyFetchingImpl { - private static final Logger log = LoggerFactory.getLogger(PropertyFetchingImpl.class); - private final AtomicBoolean USE_SET_ACCESSIBLE = new AtomicBoolean(true); private final AtomicBoolean USE_LAMBDA_FACTORY = new AtomicBoolean(true); private final AtomicBoolean USE_NEGATIVE_CACHE = new AtomicBoolean(true); @@ -65,7 +66,7 @@ private static final class CachedLambdaFunction { } } - public Object getPropertyValue(String propertyName, Object object, GraphQLType graphQLType, boolean dfeInUse, Supplier singleArgumentValue) { + public Object getPropertyValue(String propertyName, Object object, GraphQLType graphQLType, boolean dfeInUse, Supplier singleArgumentValue) { if (object instanceof Map) { return ((Map) object).get(propertyName); } @@ -123,8 +124,6 @@ public Object getPropertyValue(String propertyName, Object object, GraphQLType g // are preventing 
the Meta Lambda from working. So let's continue with // old skool reflection and if it's all broken there then it will eventually // end up negatively cached - log.debug("Unable to invoke fast Meta Lambda for `{}` - Falling back to reflection", object.getClass().getName(), ignored); - } } @@ -195,12 +194,12 @@ private interface MethodFinder { Method apply(Class aClass, String s) throws NoSuchMethodException; } - private Object getPropertyViaRecordMethod(Object object, String propertyName, MethodFinder methodFinder, Supplier singleArgumentValue) throws NoSuchMethodException { + private Object getPropertyViaRecordMethod(Object object, String propertyName, MethodFinder methodFinder, Supplier singleArgumentValue) throws NoSuchMethodException { Method method = methodFinder.apply(object.getClass(), propertyName); return invokeMethod(object, singleArgumentValue, method, takesSingleArgumentTypeAsOnlyArgument(method)); } - private Object getPropertyViaGetterMethod(Object object, String propertyName, GraphQLType graphQLType, MethodFinder methodFinder, Supplier singleArgumentValue) throws NoSuchMethodException { + private Object getPropertyViaGetterMethod(Object object, String propertyName, GraphQLType graphQLType, MethodFinder methodFinder, Supplier singleArgumentValue) throws NoSuchMethodException { if (isBooleanProperty(graphQLType)) { try { return getPropertyViaGetterUsingPrefix(object, propertyName, "is", methodFinder, singleArgumentValue); @@ -212,8 +211,8 @@ private Object getPropertyViaGetterMethod(Object object, String propertyName, Gr } } - private Object getPropertyViaGetterUsingPrefix(Object object, String propertyName, String prefix, MethodFinder methodFinder, Supplier singleArgumentValue) throws NoSuchMethodException { - String getterName = prefix + propertyName.substring(0, 1).toUpperCase() + propertyName.substring(1); + private Object getPropertyViaGetterUsingPrefix(Object object, String propertyName, String prefix, MethodFinder methodFinder, Supplier 
singleArgumentValue) throws NoSuchMethodException { + String getterName = prefix + StringKit.capitalize(propertyName); Method method = methodFinder.apply(object.getClass(), getterName); return invokeMethod(object, singleArgumentValue, method, takesSingleArgumentTypeAsOnlyArgument(method)); } @@ -341,7 +340,7 @@ private Object getPropertyViaFieldAccess(CacheKey cacheKey, Object object, Strin } } - private Object invokeMethod(Object object, Supplier singleArgumentValue, Method method, boolean takesSingleArgument) throws FastNoSuchMethodException { + private Object invokeMethod(Object object, Supplier singleArgumentValue, Method method, boolean takesSingleArgument) throws FastNoSuchMethodException { try { if (takesSingleArgument) { Object argValue = singleArgumentValue.get(); diff --git a/src/main/java/graphql/schema/SchemaTransformer.java b/src/main/java/graphql/schema/SchemaTransformer.java index 06b6cfe08b..31846b6f85 100644 --- a/src/main/java/graphql/schema/SchemaTransformer.java +++ b/src/main/java/graphql/schema/SchemaTransformer.java @@ -322,7 +322,7 @@ public void updateZipper(NodeZipper currentZipper, List>> currentBreadcrumbs = breadcrumbsByZipper.get(currentZipper); - assertNotNull(currentBreadcrumbs, () -> format("No breadcrumbs found for zipper %s", currentZipper)); + assertNotNull(currentBreadcrumbs, "No breadcrumbs found for zipper %s", currentZipper); for (List> breadcrumbs : currentBreadcrumbs) { GraphQLSchemaElement parent = breadcrumbs.get(0).getNode(); zipperByParent.remove(parent, currentZipper); @@ -392,7 +392,7 @@ private boolean zipUpToDummyRoot(List> zippers, } NodeZipper curZipperForElement = nodeToZipper.get(element); - assertNotNull(curZipperForElement, () -> format("curZipperForElement is null for parentNode %s", element)); + assertNotNull(curZipperForElement, "curZipperForElement is null for parentNode %s", element); relevantZippers.updateZipper(curZipperForElement, newZipper); } diff --git 
a/src/main/java/graphql/schema/diff/DiffSet.java b/src/main/java/graphql/schema/diff/DiffSet.java index 3823c30d59..68c06aa13d 100644 --- a/src/main/java/graphql/schema/diff/DiffSet.java +++ b/src/main/java/graphql/schema/diff/DiffSet.java @@ -1,7 +1,6 @@ package graphql.schema.diff; import graphql.Assert; -import graphql.DeprecatedAt; import graphql.ExecutionResult; import graphql.GraphQL; import graphql.PublicApi; @@ -16,8 +15,7 @@ * {@link graphql.introspection.IntrospectionQuery}. */ @PublicApi -@Deprecated -@DeprecatedAt("2023-10-04") +@Deprecated(since = "2023-10-04") public class DiffSet { private final Map introspectionOld; diff --git a/src/main/java/graphql/schema/diff/SchemaDiff.java b/src/main/java/graphql/schema/diff/SchemaDiff.java index 9012c217f9..4676c2d836 100644 --- a/src/main/java/graphql/schema/diff/SchemaDiff.java +++ b/src/main/java/graphql/schema/diff/SchemaDiff.java @@ -1,7 +1,6 @@ package graphql.schema.diff; import graphql.Assert; -import graphql.DeprecatedAt; import graphql.PublicSpi; import graphql.introspection.IntrospectionResultToSchema; import graphql.language.Argument; @@ -39,6 +38,7 @@ import static graphql.language.TypeKind.getTypeKind; import static graphql.schema.idl.TypeInfo.getAstDesc; import static graphql.schema.idl.TypeInfo.typeInfo; +import static graphql.util.StringKit.capitalize; /** * The SchemaDiff is called with a {@link DiffSet} and will report the @@ -121,8 +121,7 @@ public SchemaDiff(Options options) { * * @return the number of API breaking changes */ - @Deprecated - @DeprecatedAt("2023-10-04") + @Deprecated(since = "2023-10-04") @SuppressWarnings("unchecked") public int diffSchema(DiffSet diffSet, DifferenceReporter reporter) { CountingReporter countingReporter = new CountingReporter(reporter); @@ -974,15 +973,6 @@ private Map sortedMap(List listOfNamedThings, Function(map); } - private static String capitalize(String name) { - if (name != null && name.length() != 0) { - char[] chars = name.toCharArray(); - 
chars[0] = Character.toUpperCase(chars[0]); - return new String(chars); - } else { - return name; - } - } private String mkDotName(String... objectNames) { return String.join(".", objectNames); diff --git a/src/main/java/graphql/schema/diffing/SchemaGraph.java b/src/main/java/graphql/schema/diffing/SchemaGraph.java index edaebe5284..02b07ca6b9 100644 --- a/src/main/java/graphql/schema/diffing/SchemaGraph.java +++ b/src/main/java/graphql/schema/diffing/SchemaGraph.java @@ -133,6 +133,9 @@ public List getAdjacentVerticesInverse(Vertex to, Predicate pred return result; } + public List getAdjacentEdges(Vertex from) { + return getAdjacentEdges(from, x -> true); + } public List getAdjacentEdges(Vertex from, Predicate predicate) { List result = new ArrayList<>(); for (Edge edge : edgesByDirection.row(from).values()) { @@ -230,31 +233,31 @@ public List addIsolatedVertices(int count, String debugPrefix) { public Vertex getFieldOrDirectiveForArgument(Vertex argument) { List adjacentVertices = getAdjacentVerticesInverse(argument); - assertTrue(adjacentVertices.size() == 1, () -> format("No field or directive found for %s", argument)); + assertTrue(adjacentVertices.size() == 1, "No field or directive found for %s", argument); return adjacentVertices.get(0); } public Vertex getFieldsContainerForField(Vertex field) { List adjacentVertices = getAdjacentVerticesInverse(field); - assertTrue(adjacentVertices.size() == 1, () -> format("No fields container found for %s", field)); + assertTrue(adjacentVertices.size() == 1, "No fields container found for %s", field); return adjacentVertices.get(0); } public Vertex getInputObjectForInputField(Vertex inputField) { List adjacentVertices = this.getAdjacentVerticesInverse(inputField); - assertTrue(adjacentVertices.size() == 1, () -> format("No input object found for %s", inputField)); + assertTrue(adjacentVertices.size() == 1, "No input object found for %s", inputField); return adjacentVertices.get(0); } public Vertex 
getAppliedDirectiveForAppliedArgument(Vertex appliedArgument) { List adjacentVertices = this.getAdjacentVerticesInverse(appliedArgument); - assertTrue(adjacentVertices.size() == 1, () -> format("No applied directive found for %s", appliedArgument)); + assertTrue(adjacentVertices.size() == 1, "No applied directive found for %s", appliedArgument); return adjacentVertices.get(0); } public Vertex getAppliedDirectiveContainerForAppliedDirective(Vertex appliedDirective) { List adjacentVertices = this.getAdjacentVerticesInverse(appliedDirective); - assertTrue(adjacentVertices.size() == 1, () -> format("No applied directive container found for %s", appliedDirective)); + assertTrue(adjacentVertices.size() == 1, "No applied directive container found for %s", appliedDirective); return adjacentVertices.get(0); } @@ -266,19 +269,19 @@ public Vertex getAppliedDirectiveContainerForAppliedDirective(Vertex appliedDire */ public Vertex getSingleAdjacentInverseVertex(Vertex input) { Collection adjacentVertices = this.getAdjacentEdgesInverseNonCopy(input); - assertTrue(adjacentVertices.size() == 1, () -> format("No parent found for %s", input)); + assertTrue(adjacentVertices.size() == 1, "No parent found for %s", input); return adjacentVertices.iterator().next().getFrom(); } public int getAppliedDirectiveIndex(Vertex appliedDirective) { List adjacentEdges = this.getAdjacentEdgesInverseCopied(appliedDirective); - assertTrue(adjacentEdges.size() == 1, () -> format("No applied directive container found for %s", appliedDirective)); + assertTrue(adjacentEdges.size() == 1, "No applied directive container found for %s", appliedDirective); return Integer.parseInt(adjacentEdges.get(0).getLabel()); } public Vertex getEnumForEnumValue(Vertex enumValue) { List adjacentVertices = this.getAdjacentVerticesInverse(enumValue); - assertTrue(adjacentVertices.size() == 1, () -> format("No enum found for %s", enumValue)); + assertTrue(adjacentVertices.size() == 1, "No enum found for %s", enumValue); 
return adjacentVertices.get(0); } @@ -295,4 +298,7 @@ public List getAllAdjacentEdges(List fromList, Vertex to) { return result; } + public boolean containsEdge(Vertex from, Vertex to) { + return this.edges.stream().anyMatch(edge -> edge.getFrom().equals(from) && edge.getTo().equals(to)); + } } diff --git a/src/main/java/graphql/schema/diffing/Vertex.java b/src/main/java/graphql/schema/diffing/Vertex.java index 3a39a34e62..4b408a37c1 100644 --- a/src/main/java/graphql/schema/diffing/Vertex.java +++ b/src/main/java/graphql/schema/diffing/Vertex.java @@ -62,7 +62,7 @@ public T getProperty(String name) { } public String getName() { - return (String) Assert.assertNotNull(properties.get("name"), () -> String.format("should not call getName on %s", this)); + return (String) Assert.assertNotNull(properties.get("name"), "should not call getName on %s", this); } public Map getProperties() { diff --git a/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java b/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java index 97e6a324aa..d1da9830d1 100644 --- a/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java +++ b/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java @@ -9,6 +9,7 @@ import graphql.schema.diffing.Mapping; import graphql.schema.diffing.SchemaGraph; import graphql.schema.diffing.Vertex; +import graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentAddition; import graphql.schema.idl.ScalarInfo; import java.util.ArrayList; @@ -18,6 +19,7 @@ import java.util.Map; import java.util.function.Predicate; +import static graphql.Assert.assertShouldNeverHappen; import static graphql.Assert.assertTrue; import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveAddition; import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentDeletion; @@ -207,6 +209,9 @@ private void handleArgumentChanges(List editOperations, Mapping m private void handleAppliedDirectives(List 
editOperations, Mapping mapping) { + // first the applied directives itself and then all the applied arguments changes + // for the applied directives, so that we check for example if the applied directive is + // deleted before we check for the applied directive argument changes for (EditOperation editOperation : editOperations) { switch (editOperation.getOperation()) { case INSERT_VERTEX: @@ -214,15 +219,31 @@ private void handleAppliedDirectives(List editOperations, Mapping appliedDirectiveAdded(editOperation); } break; + case CHANGE_VERTEX: + // TODO: handle applied directive changes + break; + case DELETE_VERTEX: + if (editOperation.getSourceVertex().isOfType(SchemaGraph.APPLIED_DIRECTIVE)) { + appliedDirectiveDeleted(editOperation); + } + break; + + } + } + for (EditOperation editOperation : editOperations) { + switch (editOperation.getOperation()) { + case INSERT_VERTEX: + if (editOperation.getTargetVertex().isOfType(SchemaGraph.APPLIED_ARGUMENT)) { + appliedDirectiveArgumentAdded(editOperation); + } + break; case CHANGE_VERTEX: if (editOperation.getTargetVertex().isOfType(SchemaGraph.APPLIED_ARGUMENT)) { appliedDirectiveArgumentChanged(editOperation); } break; case DELETE_VERTEX: - if (editOperation.getSourceVertex().isOfType(SchemaGraph.APPLIED_DIRECTIVE)) { - appliedDirectiveDeleted(editOperation); - } else if (editOperation.getSourceVertex().isOfType(SchemaGraph.APPLIED_ARGUMENT)) { + if (editOperation.getSourceVertex().isOfType(SchemaGraph.APPLIED_ARGUMENT)) { appliedDirectiveArgumentDeleted(editOperation); } break; @@ -319,7 +340,60 @@ private void appliedDirectiveArgumentDeleted(EditOperation editOperation) { Vertex appliedDirective = oldSchemaGraph.getAppliedDirectiveForAppliedArgument(deletedArgument); Vertex container = oldSchemaGraph.getAppliedDirectiveContainerForAppliedDirective(appliedDirective); - if (container.isOfType(SchemaGraph.FIELD)) { + if (container.isOfType(SchemaGraph.ARGUMENT)) { + Vertex argument = container; + Vertex 
fieldOrDirective = oldSchemaGraph.getFieldOrDirectiveForArgument(argument); + if (fieldOrDirective.isOfType(SchemaGraph.FIELD)) { + Vertex field = fieldOrDirective; + Vertex fieldsContainer = oldSchemaGraph.getFieldsContainerForField(field); + if (fieldsContainer.isOfType(SchemaGraph.OBJECT)) { + Vertex object = fieldsContainer; + if (isObjectDeleted(object.getName())) { + return; + } + if (isFieldDeletedFromExistingObject(object.getName(), field.getName())) { + return; + } + if (isArgumentDeletedFromExistingObjectField(object.getName(), field.getName(), argument.getName())) { + return; + } + if (isAppliedDirectiveDeleted(object, appliedDirective.getName())) { + return; + } + AppliedDirectiveObjectFieldArgumentLocation location = new AppliedDirectiveObjectFieldArgumentLocation(object.getName(), field.getName(), argument.getName(), appliedDirective.getName()); + getObjectModification(object.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (fieldsContainer.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = fieldsContainer; + if (isInterfaceDeleted(interfaze.getName())) { + return; + } + if (isFieldNewForExistingInterface(interfaze.getName(), field.getName())) { + return; + } + if (isArgumentDeletedFromExistingInterfaceField(interfaze.getName(), field.getName(), argument.getName())) { + return; + } + if (isAppliedDirectiveDeleted(interfaze, appliedDirective.getName())) { + return; + } + AppliedDirectiveInterfaceFieldArgumentLocation location = new AppliedDirectiveInterfaceFieldArgumentLocation(interfaze.getName(), field.getName(), argument.getName(), appliedDirective.getName()); + getInterfaceModification(interfaze.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } + } else if (fieldOrDirective.isOfType(SchemaGraph.DIRECTIVE)) { + Vertex directive = fieldOrDirective; + if (isDirectiveDeleted(directive.getName())) { + return; + } + if 
(isArgumentDeletedFromExistingDirective(directive.getName(), argument.getName())) { + return; + } + if (isAppliedDirectiveDeleted(fieldOrDirective, appliedDirective.getName())) { + return; + } + AppliedDirectiveDirectiveArgumentLocation location = new AppliedDirectiveDirectiveArgumentLocation(directive.getName(), argument.getName(), appliedDirective.getName()); + getDirectiveModification(directive.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } + } else if (container.isOfType(SchemaGraph.FIELD)) { Vertex field = container; Vertex interfaceOrObjective = oldSchemaGraph.getFieldsContainerForField(field); if (interfaceOrObjective.isOfType(SchemaGraph.OBJECT)) { @@ -327,6 +401,13 @@ private void appliedDirectiveArgumentDeleted(EditOperation editOperation) { if (isObjectDeleted(object.getName())) { return; } + if (isFieldDeletedFromExistingObject(object.getName(), field.getName())) { + return; + } + if (isAppliedDirectiveDeleted(object, appliedDirective.getName())) { + return; + } + AppliedDirectiveObjectFieldLocation location = new AppliedDirectiveObjectFieldLocation(object.getName(), field.getName(), appliedDirective.getName()); getObjectModification(object.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); } else { @@ -335,12 +416,291 @@ private void appliedDirectiveArgumentDeleted(EditOperation editOperation) { if (isInterfaceDeleted(interfaze.getName())) { return; } + if (isFieldDeletedFromExistingInterface(interfaze.getName(), field.getName())) { + return; + } + if (isAppliedDirectiveDeleted(interfaze, appliedDirective.getName())) { + return; + } + AppliedDirectiveInterfaceFieldLocation location = new AppliedDirectiveInterfaceFieldLocation(interfaze.getName(), field.getName(), appliedDirective.getName()); getInterfaceModification(interfaze.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); } + } else 
if (container.isOfType(SchemaGraph.SCALAR)) { + Vertex scalar = container; + if (isScalarDeleted(scalar.getName())) { + return; + } + if (isAppliedDirectiveDeleted(scalar, appliedDirective.getName())) { + return; + } + + AppliedDirectiveScalarLocation location = new AppliedDirectiveScalarLocation(scalar.getName(), appliedDirective.getName()); + getScalarModification(scalar.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (container.isOfType(SchemaGraph.ENUM)) { + Vertex enumVertex = container; + if (isEnumDeleted(enumVertex.getName())) { + return; + } + if (isAppliedDirectiveDeleted(enumVertex, appliedDirective.getName())) { + return; + } + AppliedDirectiveEnumLocation location = new AppliedDirectiveEnumLocation(enumVertex.getName(), appliedDirective.getName()); + getEnumModification(enumVertex.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (container.isOfType(SchemaGraph.ENUM_VALUE)) { + Vertex enumValue = container; + Vertex enumVertex = oldSchemaGraph.getEnumForEnumValue(enumValue); + if (isEnumDeleted(enumVertex.getName())) { + return; + } + if (isNewEnumValueForExistingEnum(enumVertex.getName(), enumValue.getName())) { + return; + } + AppliedDirectiveEnumValueLocation location = new AppliedDirectiveEnumValueLocation(enumVertex.getName(), enumValue.getName(), appliedDirective.getName()); + getEnumModification(enumVertex.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (container.isOfType(SchemaGraph.UNION)) { + Vertex union = container; + if (isUnionDeleted(union.getName())) { + return; + } + if (isAppliedDirectiveDeleted(union, appliedDirective.getName())) { + return; + } + + AppliedDirectiveUnionLocation location = new AppliedDirectiveUnionLocation(union.getName(), appliedDirective.getName()); + getUnionModification(union.getName()).getDetails().add(new 
AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (container.isOfType(SchemaGraph.OBJECT)) { + Vertex object = container; + if (isObjectDeleted(object.getName())) { + return; + } + if (isAppliedDirectiveDeleted(object, appliedDirective.getName())) { + return; + } + + AppliedDirectiveObjectLocation location = new AppliedDirectiveObjectLocation(object.getName(), appliedDirective.getName()); + getObjectModification(object.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (container.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = container; + if (isInterfaceDeleted(interfaze.getName())) { + return; + } + if (isAppliedDirectiveDeleted(interfaze, appliedDirective.getName())) { + return; + } + + AppliedDirectiveInterfaceLocation location = new AppliedDirectiveInterfaceLocation(interfaze.getName(), appliedDirective.getName()); + getInterfaceModification(interfaze.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (container.isOfType(SchemaGraph.INPUT_OBJECT)) { + Vertex inputObject = container; + if (isInputObjectDeleted(inputObject.getName())) { + return; + } + if (isAppliedDirectiveDeleted(inputObject, appliedDirective.getName())) { + return; + } + AppliedDirectiveInputObjectLocation location = new AppliedDirectiveInputObjectLocation(inputObject.getName(), appliedDirective.getName()); + getInputObjectModification(inputObject.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else if (container.isOfType(SchemaGraph.INPUT_FIELD)) { + Vertex inputField = container; + Vertex inputObject = oldSchemaGraph.getInputObjectForInputField(inputField); + if (isInputObjectDeleted(inputObject.getName())) { + return; + } + if (isNewInputFieldExistingInputObject(inputObject.getName(), inputField.getName())) { + return; + } + if 
(isAppliedDirectiveDeleted(inputField, appliedDirective.getName())) { + return; + } + AppliedDirectiveInputObjectFieldLocation location = new AppliedDirectiveInputObjectFieldLocation(inputObject.getName(), inputField.getName(), appliedDirective.getName()); + getInputObjectModification(inputObject.getName()).getDetails().add(new AppliedDirectiveArgumentDeletion(location, deletedArgument.getName())); + } else { + assertShouldNeverHappen("Unexpected container " + container); + } + } + + private void appliedDirectiveArgumentAdded(EditOperation editOperation) { + Vertex addedArgument = editOperation.getTargetVertex(); + Vertex appliedDirective = newSchemaGraph.getAppliedDirectiveForAppliedArgument(addedArgument); + Vertex container = newSchemaGraph.getAppliedDirectiveContainerForAppliedDirective(appliedDirective); + + if (container.isOfType(SchemaGraph.ARGUMENT)) { + Vertex argument = container; + Vertex fieldOrDirective = newSchemaGraph.getFieldOrDirectiveForArgument(argument); + if (fieldOrDirective.isOfType(SchemaGraph.FIELD)) { + Vertex field = fieldOrDirective; + Vertex fieldsContainer = newSchemaGraph.getFieldsContainerForField(field); + if (fieldsContainer.isOfType(SchemaGraph.OBJECT)) { + Vertex object = fieldsContainer; + if (isObjectAdded(object.getName())) { + return; + } + if (isFieldNewForExistingObject(object.getName(), field.getName())) { + return; + } + if (isArgumentNewForExistingObjectField(object.getName(), field.getName(), argument.getName())) { + return; + } + if (isAppliedDirectiveAdded(object, appliedDirective.getName())) { + return; + } + AppliedDirectiveObjectFieldArgumentLocation location = new AppliedDirectiveObjectFieldArgumentLocation(object.getName(), field.getName(), argument.getName(), appliedDirective.getName()); + getObjectModification(object.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if (fieldsContainer.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = 
fieldsContainer; + if (isInterfaceAdded(interfaze.getName())) { + return; + } + if (isFieldNewForExistingInterface(interfaze.getName(), field.getName())) { + return; + } + if (isArgumentNewForExistingInterfaceField(interfaze.getName(), field.getName(), argument.getName())) { + return; + } + + if (isAppliedDirectiveAdded(interfaze, appliedDirective.getName())) { + return; + } + AppliedDirectiveInterfaceFieldArgumentLocation location = new AppliedDirectiveInterfaceFieldArgumentLocation(interfaze.getName(), field.getName(), argument.getName(), appliedDirective.getName()); + getInterfaceModification(interfaze.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } + } else if (fieldOrDirective.isOfType(SchemaGraph.DIRECTIVE)) { + Vertex directive = fieldOrDirective; + if (isAppliedDirectiveAdded(directive, appliedDirective.getName())) { + return; + } + AppliedDirectiveDirectiveArgumentLocation location = new AppliedDirectiveDirectiveArgumentLocation(directive.getName(), argument.getName(), appliedDirective.getName()); + getDirectiveModification(directive.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } + } else if (container.isOfType(SchemaGraph.FIELD)) { + Vertex field = container; + Vertex interfaceOrObjective = newSchemaGraph.getFieldsContainerForField(field); + if (interfaceOrObjective.isOfType(SchemaGraph.OBJECT)) { + Vertex object = interfaceOrObjective; + if (isObjectAdded(object.getName())) { + return; + } + if (isFieldNewForExistingObject(object.getName(), field.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + + AppliedDirectiveObjectFieldLocation location = new AppliedDirectiveObjectFieldLocation(object.getName(), field.getName(), appliedDirective.getName()); + getObjectModification(object.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, 
addedArgument.getName())); + } else if (interfaceOrObjective.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = interfaceOrObjective; + if (isInterfaceAdded(interfaze.getName())) { + return; + } + if (isFieldNewForExistingInterface(interfaze.getName(), field.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + + AppliedDirectiveInterfaceFieldLocation location = new AppliedDirectiveInterfaceFieldLocation(interfaze.getName(), field.getName(), appliedDirective.getName()); + getInterfaceModification(interfaze.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else { + assertShouldNeverHappen("Unexpected field container " + interfaceOrObjective); + } + } else if (container.isOfType(SchemaGraph.SCALAR)) { + Vertex scalar = container; + if (isScalarAdded(scalar.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveScalarLocation location = new AppliedDirectiveScalarLocation(scalar.getName(), appliedDirective.getName()); + getScalarModification(scalar.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if (container.isOfType(SchemaGraph.ENUM)) { + Vertex enumVertex = container; + if (isEnumAdded(enumVertex.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveEnumLocation location = new AppliedDirectiveEnumLocation(enumVertex.getName(), appliedDirective.getName()); + getEnumModification(enumVertex.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if (container.isOfType(SchemaGraph.ENUM_VALUE)) { + Vertex enumValue = container; + Vertex enumVertex = newSchemaGraph.getEnumForEnumValue(enumValue); + if (isEnumAdded(enumVertex.getName())) { + return; + } + if 
(isNewEnumValueForExistingEnum(enumVertex.getName(), enumValue.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveEnumValueLocation location = new AppliedDirectiveEnumValueLocation(enumVertex.getName(), enumValue.getName(), appliedDirective.getName()); + getEnumModification(enumVertex.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if (container.isOfType(SchemaGraph.UNION)) { + Vertex union = container; + if (isUnionAdded(union.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveUnionLocation location = new AppliedDirectiveUnionLocation(union.getName(), appliedDirective.getName()); + getUnionModification(union.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if (container.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = container; + if (isInterfaceAdded(interfaze.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveInterfaceLocation location = new AppliedDirectiveInterfaceLocation(interfaze.getName(), appliedDirective.getName()); + getInterfaceModification(interfaze.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if (container.isOfType(SchemaGraph.OBJECT)) { + Vertex interfaze = container; + if (isObjectAdded(interfaze.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveObjectLocation location = new AppliedDirectiveObjectLocation(interfaze.getName(), appliedDirective.getName()); + getObjectModification(interfaze.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if 
(container.isOfType(SchemaGraph.INPUT_OBJECT)) { + Vertex inputObject = container; + if (isInputObjectAdded(inputObject.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveInputObjectLocation location = new AppliedDirectiveInputObjectLocation(inputObject.getName(), appliedDirective.getName()); + getInputObjectModification(inputObject.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else if (container.isOfType(SchemaGraph.INPUT_FIELD)) { + Vertex inputField = container; + Vertex inputObject = newSchemaGraph.getInputObjectForInputField(inputField); + if (isInputObjectAdded(inputObject.getName())) { + return; + } + if (isNewInputFieldExistingInputObject(inputObject.getName(), inputField.getName())) { + return; + } + if (isAppliedDirectiveAdded(container, appliedDirective.getName())) { + return; + } + AppliedDirectiveInputObjectFieldLocation location = new AppliedDirectiveInputObjectFieldLocation(inputObject.getName(), inputField.getName(), appliedDirective.getName()); + getInputObjectModification(inputObject.getName()).getDetails().add(new AppliedDirectiveArgumentAddition(location, addedArgument.getName())); + } else { + assertShouldNeverHappen("Unexpected applied argument container " + container); } } + private void appliedDirectiveArgumentChanged(EditOperation editOperation) { Vertex appliedArgument = editOperation.getTargetVertex(); String oldArgumentName = editOperation.getSourceVertex().getName(); @@ -368,7 +728,142 @@ private void appliedDirectiveArgumentChanged(EditOperation editOperation) { AppliedDirectiveArgumentRename argumentRename = new AppliedDirectiveArgumentRename(location, oldArgumentName, newArgumentName); getObjectModification(object.getName()).getDetails().add(argumentRename); } + } else if (interfaceOrObjective.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = interfaceOrObjective; + 
AppliedDirectiveInterfaceFieldLocation location = new AppliedDirectiveInterfaceFieldLocation(interfaze.getName(), field.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getInterfaceModification(interfaze.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + } + } else if (container.isOfType(SchemaGraph.ARGUMENT)) { + Vertex argument = container; + Vertex fieldOrDirective = newSchemaGraph.getFieldOrDirectiveForArgument(argument); + if (fieldOrDirective.isOfType(SchemaGraph.FIELD)) { + Vertex field = fieldOrDirective; + Vertex fieldsContainer = newSchemaGraph.getFieldsContainerForField(field); + if (fieldsContainer.isOfType(SchemaGraph.OBJECT)) { + Vertex object = fieldsContainer; + AppliedDirectiveObjectFieldArgumentLocation location = new AppliedDirectiveObjectFieldArgumentLocation(object.getName(), field.getName(), argument.getName(), appliedDirective.getName()); + + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getObjectModification(object.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + } else if (fieldsContainer.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = fieldsContainer; + AppliedDirectiveInterfaceFieldArgumentLocation location = new AppliedDirectiveInterfaceFieldArgumentLocation(interfaze.getName(), field.getName(), argument.getName(), appliedDirective.getName()); + + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getInterfaceModification(interfaze.getName()).getDetails().add(argumentValueModification); + } + if 
(nameChanged) { + + } + } + } else if (fieldOrDirective.isOfType(SchemaGraph.DIRECTIVE)) { + Vertex directive = fieldOrDirective; + AppliedDirectiveDirectiveArgumentLocation location = new AppliedDirectiveDirectiveArgumentLocation(directive.getName(), argument.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getDirectiveModification(directive.getName()).getDetails().add(argumentValueModification); + } + } + } else if (container.isOfType(SchemaGraph.INPUT_FIELD)) { + Vertex inputField = container; + Vertex inputObject = newSchemaGraph.getInputObjectForInputField(inputField); + AppliedDirectiveInputObjectFieldLocation location = new AppliedDirectiveInputObjectFieldLocation(inputObject.getName(), inputField.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getInputObjectModification(inputObject.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { +// AppliedDirectiveArgumentRename argumentRename = new AppliedDirectiveArgumentRename(location, oldArgumentName, newArgumentName); +// getInputObjectModification(inputObject.getName()).getDetails().add(argumentRename); + } + } else if (container.isOfType(SchemaGraph.OBJECT)) { + Vertex object = container; + AppliedDirectiveObjectLocation location = new AppliedDirectiveObjectLocation(object.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getObjectModification(object.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + } + } else if 
(container.isOfType(SchemaGraph.INTERFACE)) { + Vertex interfaze = container; + AppliedDirectiveInterfaceLocation location = new AppliedDirectiveInterfaceLocation(interfaze.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getInterfaceModification(interfaze.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + + } else if (container.isOfType(SchemaGraph.INPUT_OBJECT)) { + Vertex inputObject = container; + AppliedDirectiveInputObjectLocation location = new AppliedDirectiveInputObjectLocation(inputObject.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getInputObjectModification(inputObject.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + + } else if (container.isOfType(SchemaGraph.ENUM)) { + Vertex enumVertex = container; + AppliedDirectiveEnumLocation location = new AppliedDirectiveEnumLocation(enumVertex.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getEnumModification(enumVertex.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + } else if (container.isOfType(SchemaGraph.ENUM_VALUE)) { + Vertex enumValue = container; + Vertex enumVertex = newSchemaGraph.getEnumForEnumValue(enumValue); + AppliedDirectiveEnumValueLocation location = new AppliedDirectiveEnumValueLocation(enumVertex.getName(), enumValue.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification 
argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getEnumModification(enumVertex.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + + } else if (container.isOfType(SchemaGraph.UNION)) { + Vertex union = container; + AppliedDirectiveUnionLocation location = new AppliedDirectiveUnionLocation(union.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getUnionModification(union.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + } else if (container.isOfType(SchemaGraph.SCALAR)) { + Vertex scalar = container; + AppliedDirectiveScalarLocation location = new AppliedDirectiveScalarLocation(scalar.getName(), appliedDirective.getName()); + if (valueChanged) { + AppliedDirectiveArgumentValueModification argumentValueModification = new AppliedDirectiveArgumentValueModification(location, newArgumentName, oldValue, newValue); + getScalarModification(scalar.getName()).getDetails().add(argumentValueModification); + } + if (nameChanged) { + + } + } else { + assertShouldNeverHappen("Unexpected applied argument container " + container); } } @@ -536,7 +1031,7 @@ private void appliedDirectiveDeletedFromArgument(Vertex appliedDirective, Vertex if (isArgumentDeletedFromExistingDirective(directive.getName(), argument.getName())) { return; } - AppliedDirectiveDirectiveArgumentLocation location = new AppliedDirectiveDirectiveArgumentLocation(directive.getName(), argument.getName()); + AppliedDirectiveDirectiveArgumentLocation location = new AppliedDirectiveDirectiveArgumentLocation(directive.getName(), argument.getName(), appliedDirective.getName()); AppliedDirectiveDeletion appliedDirectiveDeletion = new AppliedDirectiveDeletion(location, appliedDirective.getName()); 
getDirectiveModification(directive.getName()).getDetails().add(appliedDirectiveDeletion); } @@ -587,7 +1082,7 @@ private void appliedDirectiveAddedToArgument(Vertex appliedDirective, Vertex con if (isArgumentNewForExistingDirective(directive.getName(), argument.getName())) { return; } - AppliedDirectiveDirectiveArgumentLocation location = new AppliedDirectiveDirectiveArgumentLocation(directive.getName(), argument.getName()); + AppliedDirectiveDirectiveArgumentLocation location = new AppliedDirectiveDirectiveArgumentLocation(directive.getName(), argument.getName(), appliedDirective.getName()); AppliedDirectiveAddition appliedDirectiveAddition = new AppliedDirectiveAddition(location, appliedDirective.getName()); getDirectiveModification(directive.getName()).getDetails().add(appliedDirectiveAddition); } @@ -1408,6 +1903,150 @@ private boolean isInputFieldAdded(String name) { return inputObjectDifferences.containsKey(name) && inputObjectDifferences.get(name) instanceof InputObjectAddition; } + private boolean isAppliedDirectiveAdded(Vertex container, String appliedDirectiveName) { + if (container.isOfType(SchemaGraph.SCALAR)) { + if (scalarDifferences.containsKey(container.getName())) { + ScalarDifference scalarDifference = scalarDifferences.get(container.getName()); + if (scalarDifference instanceof ScalarModification) { + ScalarModification scalarModification = (ScalarModification) scalarDifference; + List appliedDirectiveAdditions = scalarModification.getDetails(AppliedDirectiveAddition.class); + return appliedDirectiveAdditions.stream().anyMatch(addition -> addition.getName().equals(appliedDirectiveName)); + } + } + } else if (container.isOfType(SchemaGraph.ENUM)) { + if (enumDifferences.containsKey(container.getName())) { + EnumDifference enumDifference = enumDifferences.get(container.getName()); + if (enumDifference instanceof EnumModification) { + EnumModification enumModification = (EnumModification) enumDifference; + List appliedDirectiveAdditions = 
enumModification.getDetails(AppliedDirectiveAddition.class); + return appliedDirectiveAdditions.stream().anyMatch(addition -> addition.getName().equals(appliedDirectiveName)); + } + } + } else if (container.isOfType(SchemaGraph.OBJECT)) { + if (objectDifferences.containsKey(container.getName())) { + ObjectDifference objectDifference = objectDifferences.get(container.getName()); + if (objectDifference instanceof ObjectModification) { + ObjectModification objectModification = (ObjectModification) objectDifference; + List appliedDirectiveAdditions = objectModification.getDetails(AppliedDirectiveAddition.class); + return appliedDirectiveAdditions.stream().anyMatch(addition -> addition.getName().equals(appliedDirectiveName)); + } + } + } else if (container.isOfType(SchemaGraph.INTERFACE)) { + if (interfaceDifferences.containsKey(container.getName())) { + InterfaceDifference interfaceDifference = interfaceDifferences.get(container.getName()); + if (interfaceDifference instanceof InterfaceModification) { + InterfaceModification interfaceModification = (InterfaceModification) interfaceDifference; + List appliedDirectiveAdditions = interfaceModification.getDetails(AppliedDirectiveAddition.class); + return appliedDirectiveAdditions.stream().anyMatch(addition -> addition.getName().equals(appliedDirectiveName)); + } + } + } else if (container.isOfType(SchemaGraph.INPUT_OBJECT)) { + if (inputObjectDifferences.containsKey(container.getName())) { + InputObjectDifference inputObjectDifference = inputObjectDifferences.get(container.getName()); + if (inputObjectDifference instanceof InputObjectModification) { + InputObjectModification inputObjectModification = (InputObjectModification) inputObjectDifference; + List appliedDirectiveAdditions = inputObjectModification.getDetails(AppliedDirectiveAddition.class); + return appliedDirectiveAdditions.stream().anyMatch(addition -> addition.getName().equals(appliedDirectiveName)); + } + } + } else if (container.isOfType(SchemaGraph.UNION)) { 
+ if (unionDifferences.containsKey(container.getName())) { + UnionDifference unionDifference = unionDifferences.get(container.getName()); + if (unionDifference instanceof UnionModification) { + UnionModification unionModification = (UnionModification) unionDifference; + List appliedDirectiveAdditions = unionModification.getDetails(AppliedDirectiveAddition.class); + return appliedDirectiveAdditions.stream().anyMatch(addition -> addition.getName().equals(appliedDirectiveName)); + } + } + } else if (container.isOfType(SchemaGraph.DIRECTIVE)) { + if (directiveDifferences.containsKey(container.getName())) { + DirectiveDifference directiveDifference = directiveDifferences.get(container.getName()); + if (directiveDifference instanceof DirectiveModification) { + DirectiveModification directiveModification = (DirectiveModification) directiveDifference; + List appliedDirectiveAdditions = directiveModification.getDetails(AppliedDirectiveAddition.class); + return appliedDirectiveAdditions.stream().anyMatch(addition -> addition.getName().equals(appliedDirectiveName)); + } + } + } + return false; + } + + + private boolean isAppliedDirectiveDeleted(Vertex rootContainer, String appliedDirectiveName) { +// if (rootContainer.isOfType(SchemaGraph.ARGUMENT)) { +// Vertex argument = rootContainer; +// Vertex fieldOrDirective = oldSchemaGraph.getFieldOrDirectiveForArgument(argument); +// if (fieldOrDirective.isOfType(SchemaGraph.DIRECTIVE)) { +// return isArgumentDeletedFromExistingDirective(fieldOrDirective.getName(), argument.getName()); +// } +// } + if (rootContainer.isOfType(SchemaGraph.SCALAR)) { + if (scalarDifferences.containsKey(rootContainer.getName())) { + ScalarDifference scalarDifference = scalarDifferences.get(rootContainer.getName()); + if (scalarDifference instanceof ScalarModification) { + ScalarModification scalarModification = (ScalarModification) scalarDifference; + List appliedDirectiveDeletions = scalarModification.getDetails(AppliedDirectiveDeletion.class); + 
return appliedDirectiveDeletions.stream().anyMatch(deletion -> deletion.getName().equals(appliedDirectiveName)); + } + } + } else if (rootContainer.isOfType(SchemaGraph.ENUM)) { + if (enumDifferences.containsKey(rootContainer.getName())) { + EnumDifference enumDifference = enumDifferences.get(rootContainer.getName()); + if (enumDifference instanceof EnumModification) { + EnumModification enumModification = (EnumModification) enumDifference; + List appliedDirectiveDeletions = enumModification.getDetails(AppliedDirectiveDeletion.class); + return appliedDirectiveDeletions.stream().anyMatch(deletion -> deletion.getName().equals(appliedDirectiveName)); + } + } + } else if (rootContainer.isOfType(SchemaGraph.OBJECT)) { + if (objectDifferences.containsKey(rootContainer.getName())) { + ObjectDifference objectDifference = objectDifferences.get(rootContainer.getName()); + if (objectDifference instanceof ObjectModification) { + ObjectModification objectModification = (ObjectModification) objectDifference; + List appliedDirectiveDeletions = objectModification.getDetails(AppliedDirectiveDeletion.class); + return appliedDirectiveDeletions.stream().anyMatch(deletion -> deletion.getName().equals(appliedDirectiveName)); + } + } + } else if (rootContainer.isOfType(SchemaGraph.INTERFACE)) { + if (interfaceDifferences.containsKey(rootContainer.getName())) { + InterfaceDifference interfaceDifference = interfaceDifferences.get(rootContainer.getName()); + if (interfaceDifference instanceof InterfaceModification) { + InterfaceModification interfaceModification = (InterfaceModification) interfaceDifference; + List appliedDirectiveDeletions = interfaceModification.getDetails(AppliedDirectiveDeletion.class); + return appliedDirectiveDeletions.stream().anyMatch(deletion -> deletion.getName().equals(appliedDirectiveName)); + } + } + } else if (rootContainer.isOfType(SchemaGraph.INPUT_OBJECT)) { + if (inputObjectDifferences.containsKey(rootContainer.getName())) { + InputObjectDifference 
inputObjectDifference = inputObjectDifferences.get(rootContainer.getName()); + if (inputObjectDifference instanceof InputObjectModification) { + InputObjectModification inputObjectModification = (InputObjectModification) inputObjectDifference; + List appliedDirectiveDeletions = inputObjectModification.getDetails(AppliedDirectiveDeletion.class); + return appliedDirectiveDeletions.stream().anyMatch(deletion -> deletion.getName().equals(appliedDirectiveName)); + } + } + } else if (rootContainer.isOfType(SchemaGraph.UNION)) { + if (unionDifferences.containsKey(rootContainer.getName())) { + UnionDifference unionDifference = unionDifferences.get(rootContainer.getName()); + if (unionDifference instanceof UnionModification) { + UnionModification unionModification = (UnionModification) unionDifference; + List appliedDirectiveDeletions = unionModification.getDetails(AppliedDirectiveDeletion.class); + return appliedDirectiveDeletions.stream().anyMatch(deletion -> deletion.getName().equals(appliedDirectiveName)); + } + } + } else if (rootContainer.isOfType(SchemaGraph.DIRECTIVE)) { + if (directiveDifferences.containsKey(rootContainer.getName())) { + DirectiveDifference directiveDifference = directiveDifferences.get(rootContainer.getName()); + if (directiveDifference instanceof DirectiveModification) { + DirectiveModification directiveModification = (DirectiveModification) directiveDifference; + List appliedDirectiveDeletions = directiveModification.getDetails(AppliedDirectiveDeletion.class); + return appliedDirectiveDeletions.stream().anyMatch(deletion -> deletion.getName().equals(appliedDirectiveName)); + } + } + } + return false; + } + private boolean isNewInputFieldExistingInputObject(String inputObjectName, String fieldName) { if (!inputObjectDifferences.containsKey(inputObjectName)) { return false; @@ -1475,7 +2114,8 @@ private boolean isArgumentNewForExistingObjectField(String objectName, String fi return newArgs.stream().anyMatch(detail -> 
detail.getFieldName().equals(fieldName) && detail.getName().equals(argumentName)); } - private boolean isArgumentDeletedFromExistingObjectField(String objectName, String fieldName, String argumentName) { + private boolean isArgumentDeletedFromExistingObjectField(String objectName, String fieldName, String + argumentName) { if (!objectDifferences.containsKey(objectName)) { return false; } @@ -1494,7 +2134,8 @@ private boolean isArgumentDeletedFromExistingObjectField(String objectName, Stri return deletedArgs.stream().anyMatch(detail -> detail.getFieldName().equals(fieldName) && detail.getName().equals(argumentName)); } - private boolean isArgumentDeletedFromExistingInterfaceField(String interfaceName, String fieldName, String argumentName) { + private boolean isArgumentDeletedFromExistingInterfaceField(String interfaceName, String fieldName, String + argumentName) { if (!interfaceDifferences.containsKey(interfaceName)) { return false; } diff --git a/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java b/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java index 04172d4e91..d6a2c8e494 100644 --- a/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java +++ b/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java @@ -1389,12 +1389,15 @@ public String getDirectiveName() { } class AppliedDirectiveDirectiveArgumentLocation implements AppliedDirectiveLocationDetail { + // this is the applied directive name private final String directiveName; + private final String directiveDefinitionName; private final String argumentName; - public AppliedDirectiveDirectiveArgumentLocation(String directiveName, String argumentName) { - this.directiveName = directiveName; + public AppliedDirectiveDirectiveArgumentLocation(String directiveDefinitionName, String argumentName, String directiveName) { + this.directiveDefinitionName = directiveDefinitionName; this.argumentName = argumentName; + this.directiveName = directiveName; } public String 
getDirectiveName() { @@ -1404,6 +1407,10 @@ public String getDirectiveName() { public String getArgumentName() { return argumentName; } + + public String getDirectiveDefinitionName() { + return directiveDefinitionName; + } } class AppliedDirectiveInterfaceFieldArgumentLocation implements AppliedDirectiveLocationDetail { @@ -1450,6 +1457,10 @@ public AppliedDirectiveUnionLocation(String name, String directiveName) { public String getName() { return name; } + + public String getDirectiveName() { + return directiveName; + } } class AppliedDirectiveEnumLocation implements AppliedDirectiveLocationDetail { @@ -1598,11 +1609,37 @@ class AppliedDirectiveRenamed { } - class AppliedDirectiveArgumentAddition { + class AppliedDirectiveArgumentAddition implements ObjectModificationDetail, + InterfaceModificationDetail, + ScalarModificationDetail, + EnumModificationDetail, + UnionModificationDetail, + InputObjectModificationDetail, + DirectiveModificationDetail { + private final AppliedDirectiveLocationDetail locationDetail; + private final String argumentName; + + public AppliedDirectiveArgumentAddition(AppliedDirectiveLocationDetail locationDetail, String argumentName) { + this.locationDetail = locationDetail; + this.argumentName = argumentName; + } + public AppliedDirectiveLocationDetail getLocationDetail() { + return locationDetail; + } + + public String getArgumentName() { + return argumentName; + } } - class AppliedDirectiveArgumentDeletion implements ObjectModificationDetail, InterfaceModificationDetail { + class AppliedDirectiveArgumentDeletion implements ObjectModificationDetail, + InterfaceModificationDetail, + ScalarModificationDetail, + EnumModificationDetail, + UnionModificationDetail, + InputObjectModificationDetail, + DirectiveModificationDetail { private final AppliedDirectiveLocationDetail locationDetail; private final String argumentName; @@ -1621,7 +1658,13 @@ public String getArgumentName() { } - class AppliedDirectiveArgumentValueModification implements 
ObjectModificationDetail { + class AppliedDirectiveArgumentValueModification implements ObjectModificationDetail, + InterfaceModificationDetail, + InputObjectModificationDetail, + EnumModificationDetail, + UnionModificationDetail, + ScalarModificationDetail, + DirectiveModificationDetail { private final AppliedDirectiveLocationDetail locationDetail; private final String argumentName; private final String oldValue; @@ -1651,7 +1694,7 @@ public String getNewValue() { } } - class AppliedDirectiveArgumentRename implements ObjectModificationDetail { + class AppliedDirectiveArgumentRename implements ObjectModificationDetail, InterfaceModificationDetail { private final AppliedDirectiveLocationDetail locationDetail; private final String oldName; private final String newName; diff --git a/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java b/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java index c9d2498abd..9dcd16b768 100644 --- a/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java +++ b/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java @@ -30,8 +30,6 @@ import graphql.schema.CoercingParseLiteralException; import graphql.schema.GraphQLScalarType; import graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError; -import graphql.util.LogKit; -import org.slf4j.Logger; import java.util.List; import java.util.Locale; @@ -64,8 +62,6 @@ @Internal class ArgValueOfAllowedTypeChecker { - private static final Logger logNotSafe = LogKit.getNotPrivacySafeLogger(ArgValueOfAllowedTypeChecker.class); - private final Directive directive; private final Node element; private final String elementName; @@ -291,9 +287,6 @@ private boolean isArgumentValueScalarLiteral(GraphQLScalarType scalarType, Value scalarType.getCoercing().parseLiteral(instanceValue, CoercedVariables.emptyVariables(), GraphQLContext.getDefault(), Locale.getDefault()); return true; } catch (CoercingParseLiteralException ex) { - if (logNotSafe.isDebugEnabled()) 
{ - logNotSafe.debug("Attempted parsing literal into '{}' but got the following error: ", scalarType.getName(), ex); - } return false; } } diff --git a/src/main/java/graphql/schema/idl/RuntimeWiring.java b/src/main/java/graphql/schema/idl/RuntimeWiring.java index 69ba9f6633..0a95adce08 100644 --- a/src/main/java/graphql/schema/idl/RuntimeWiring.java +++ b/src/main/java/graphql/schema/idl/RuntimeWiring.java @@ -1,6 +1,5 @@ package graphql.schema.idl; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.schema.DataFetcher; import graphql.schema.GraphQLCodeRegistry; @@ -8,10 +7,10 @@ import graphql.schema.GraphQLSchema; import graphql.schema.GraphqlTypeComparatorRegistry; import graphql.schema.TypeResolver; +import graphql.schema.idl.errors.StrictModeWiringException; import graphql.schema.visibility.GraphqlFieldVisibility; import java.util.ArrayList; -import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -20,6 +19,7 @@ import static graphql.Assert.assertNotNull; import static graphql.schema.visibility.DefaultGraphqlFieldVisibility.DEFAULT_FIELD_VISIBILITY; +import static java.lang.String.format; /** * A runtime wiring is a specification of data fetchers, type resolvers and custom scalars that are needed @@ -36,7 +36,6 @@ public class RuntimeWiring { private final List directiveWiring; private final WiringFactory wiringFactory; private final Map enumValuesProviders; - private final Collection schemaGeneratorPostProcessings; private final GraphqlFieldVisibility fieldVisibility; private final GraphQLCodeRegistry codeRegistry; private final GraphqlTypeComparatorRegistry comparatorRegistry; @@ -58,7 +57,6 @@ private RuntimeWiring(Builder builder) { this.directiveWiring = builder.directiveWiring; this.wiringFactory = builder.wiringFactory; this.enumValuesProviders = builder.enumValuesProviders; - this.schemaGeneratorPostProcessings = builder.schemaGeneratorPostProcessings; this.fieldVisibility = 
builder.fieldVisibility; this.codeRegistry = builder.codeRegistry; this.comparatorRegistry = builder.comparatorRegistry; @@ -86,7 +84,6 @@ public static Builder newRuntimeWiring(RuntimeWiring originalRuntimeWiring) { builder.directiveWiring.addAll(originalRuntimeWiring.directiveWiring); builder.wiringFactory = originalRuntimeWiring.wiringFactory; builder.enumValuesProviders.putAll(originalRuntimeWiring.enumValuesProviders); - builder.schemaGeneratorPostProcessings.addAll(originalRuntimeWiring.schemaGeneratorPostProcessings); builder.fieldVisibility = originalRuntimeWiring.fieldVisibility; builder.codeRegistry = originalRuntimeWiring.codeRegistry; builder.comparatorRegistry = originalRuntimeWiring.comparatorRegistry; @@ -151,10 +148,6 @@ public List getDirectiveWiring() { return directiveWiring; } - public Collection getSchemaGeneratorPostProcessings() { - return schemaGeneratorPostProcessings; - } - public GraphqlTypeComparatorRegistry getComparatorRegistry() { return comparatorRegistry; } @@ -168,8 +161,8 @@ public static class Builder { private final Map enumValuesProviders = new LinkedHashMap<>(); private final Map registeredDirectiveWiring = new LinkedHashMap<>(); private final List directiveWiring = new ArrayList<>(); - private final Collection schemaGeneratorPostProcessings = new ArrayList<>(); private WiringFactory wiringFactory = new NoopWiringFactory(); + private boolean strictMode = false; private GraphqlFieldVisibility fieldVisibility = DEFAULT_FIELD_VISIBILITY; private GraphQLCodeRegistry codeRegistry = GraphQLCodeRegistry.newCodeRegistry().build(); private GraphqlTypeComparatorRegistry comparatorRegistry = GraphqlTypeComparatorRegistry.AS_IS_REGISTRY; @@ -178,6 +171,16 @@ private Builder() { ScalarInfo.GRAPHQL_SPECIFICATION_SCALARS.forEach(this::scalar); } + /** + * This puts the builder into strict mode, so if things get defined twice, for example, it will throw a {@link StrictModeWiringException}. 
+ * + * @return this builder + */ + public Builder strictMode() { + this.strictMode = true; + return this; + } + /** * Adds a wiring factory into the runtime wiring * @@ -223,6 +226,9 @@ public Builder codeRegistry(GraphQLCodeRegistry.Builder codeRegistry) { * @return the runtime wiring builder */ public Builder scalar(GraphQLScalarType scalarType) { + if (strictMode && scalars.containsKey(scalarType.getName())) { + throw new StrictModeWiringException(format("The scalar %s is already defined", scalarType.getName())); + } scalars.put(scalarType.getName(), scalarType); return this; } @@ -273,17 +279,26 @@ public Builder type(String typeName, UnaryOperator bu public Builder type(TypeRuntimeWiring typeRuntimeWiring) { String typeName = typeRuntimeWiring.getTypeName(); Map typeDataFetchers = dataFetchers.computeIfAbsent(typeName, k -> new LinkedHashMap<>()); - typeRuntimeWiring.getFieldDataFetchers().forEach(typeDataFetchers::put); + if (strictMode && !typeDataFetchers.isEmpty()) { + throw new StrictModeWiringException(format("The type %s has already been defined", typeName)); + } + typeDataFetchers.putAll(typeRuntimeWiring.getFieldDataFetchers()); defaultDataFetchers.put(typeName, typeRuntimeWiring.getDefaultDataFetcher()); TypeResolver typeResolver = typeRuntimeWiring.getTypeResolver(); if (typeResolver != null) { + if (strictMode && this.typeResolvers.containsKey(typeName)) { + throw new StrictModeWiringException(format("The type %s already has a type resolver defined", typeName)); + } this.typeResolvers.put(typeName, typeResolver); } EnumValuesProvider enumValuesProvider = typeRuntimeWiring.getEnumValuesProvider(); if (enumValuesProvider != null) { + if (strictMode && this.enumValuesProviders.containsKey(typeName)) { + throw new StrictModeWiringException(format("The type %s already has a enum provider defined", typeName)); + } this.enumValuesProviders.put(typeName, enumValuesProvider); } return this; @@ -347,21 +362,6 @@ public Builder 
comparatorRegistry(GraphqlTypeComparatorRegistry comparatorRegist return this; } - /** - * Adds a schema transformer into the mix - * - * @param schemaGeneratorPostProcessing the non null schema transformer to add - * - * @return the runtime wiring builder - * @deprecated This mechanism can be achieved in a better way via {@link graphql.schema.SchemaTransformer} - * after the schema is built - */ - @Deprecated - @DeprecatedAt(value = "2022-10-29") - public Builder transformer(SchemaGeneratorPostProcessing schemaGeneratorPostProcessing) { - this.schemaGeneratorPostProcessings.add(assertNotNull(schemaGeneratorPostProcessing)); - return this; - } /** * @return the built runtime wiring diff --git a/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironment.java b/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironment.java index dfe09acb43..e888039308 100644 --- a/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironment.java +++ b/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironment.java @@ -1,6 +1,5 @@ package graphql.schema.idl; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.language.NamedNode; import graphql.language.NodeParentTree; @@ -43,8 +42,7 @@ public interface SchemaDirectiveWiringEnvironment getDirectives(); /** @@ -79,8 +76,7 @@ public interface SchemaDirectiveWiringEnvironment T wireDirectives( private T invokeWiring(T element, EnvInvoker invoker, SchemaDirectiveWiring schemaDirectiveWiring, SchemaDirectiveWiringEnvironment env) { T newElement = invoker.apply(schemaDirectiveWiring, env); - assertNotNull(newElement, () -> "The SchemaDirectiveWiring MUST return a non null return value for element '" + element.getName() + "'"); + assertNotNull(newElement, "The SchemaDirectiveWiring MUST return a non null return value for element '%s'",element.getName()); return newElement; } diff --git a/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java 
b/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java index 317828aaf8..aa4878efb4 100644 --- a/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java +++ b/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java @@ -401,7 +401,7 @@ private GraphQLEnumValueDefinition buildEnumValue(BuildContext buildCtx, if (enumValuesProvider != null) { value = enumValuesProvider.getValue(evd.getName()); assertNotNull(value, - () -> format("EnumValuesProvider for %s returned null for %s", typeDefinition.getName(), evd.getName())); + "EnumValuesProvider for %s returned null for %s", typeDefinition.getName(), evd.getName()); } else { value = evd.getName(); } diff --git a/src/main/java/graphql/schema/idl/SchemaGeneratorPostProcessing.java b/src/main/java/graphql/schema/idl/SchemaGeneratorPostProcessing.java deleted file mode 100644 index 906d187f28..0000000000 --- a/src/main/java/graphql/schema/idl/SchemaGeneratorPostProcessing.java +++ /dev/null @@ -1,27 +0,0 @@ -package graphql.schema.idl; - -import graphql.DeprecatedAt; -import graphql.PublicSpi; -import graphql.schema.GraphQLSchema; - -/** - * These are called by the {@link SchemaGenerator} after a valid schema has been built - * and they can then adjust it accordingly with some sort of post processing. 
- * - * @deprecated This mechanism can be achieved in a better way via {@link graphql.schema.SchemaTransformer} - * after the schema is built - */ -@PublicSpi -@Deprecated -@DeprecatedAt(value = "2022-10-29") -public interface SchemaGeneratorPostProcessing { - - /** - * Called to transform the schema from its built state into something else - * - * @param originalSchema the original built schema - * - * @return a non null schema - */ - GraphQLSchema process(GraphQLSchema originalSchema); -} diff --git a/src/main/java/graphql/schema/idl/SchemaPrinter.java b/src/main/java/graphql/schema/idl/SchemaPrinter.java index 84c5c7e6e2..9ac97f6a2e 100644 --- a/src/main/java/graphql/schema/idl/SchemaPrinter.java +++ b/src/main/java/graphql/schema/idl/SchemaPrinter.java @@ -98,6 +98,8 @@ public static class Options { private final boolean descriptionsAsHashComments; + private final Predicate includeDirectiveDefinition; + private final Predicate includeDirective; private final Predicate includeSchemaElement; @@ -110,6 +112,7 @@ private Options(boolean includeIntrospectionTypes, boolean includeScalars, boolean includeSchemaDefinition, boolean includeDirectiveDefinitions, + Predicate includeDirectiveDefinition, boolean useAstDefinitions, boolean descriptionsAsHashComments, Predicate includeDirective, @@ -120,6 +123,7 @@ private Options(boolean includeIntrospectionTypes, this.includeScalars = includeScalars; this.includeSchemaDefinition = includeSchemaDefinition; this.includeDirectiveDefinitions = includeDirectiveDefinitions; + this.includeDirectiveDefinition = includeDirectiveDefinition; this.includeDirective = includeDirective; this.useAstDefinitions = useAstDefinitions; this.descriptionsAsHashComments = descriptionsAsHashComments; @@ -144,6 +148,10 @@ public boolean isIncludeDirectiveDefinitions() { return includeDirectiveDefinitions; } + public Predicate getIncludeDirectiveDefinition() { + return includeDirectiveDefinition; + } + public Predicate getIncludeDirective() { return 
includeDirective; } @@ -164,14 +172,16 @@ public boolean isUseAstDefinitions() { return useAstDefinitions; } - public boolean isIncludeAstDefinitionComments() { return includeAstDefinitionComments; } + public boolean isIncludeAstDefinitionComments() { + return includeAstDefinitionComments; + } public static Options defaultOptions() { return new Options(false, true, false, true, - false, + directive -> true, false, false, directive -> true, element -> true, @@ -191,7 +201,7 @@ public Options includeIntrospectionTypes(boolean flag) { this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, - this.useAstDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, this.includeDirective, this.includeSchemaElement, @@ -211,7 +221,7 @@ public Options includeScalarTypes(boolean flag) { flag, this.includeSchemaDefinition, this.includeDirectiveDefinitions, - this.useAstDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, this.includeDirective, this.includeSchemaElement, @@ -234,6 +244,7 @@ public Options includeSchemaDefinition(boolean flag) { this.includeScalars, flag, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, this.includeDirective, @@ -259,6 +270,29 @@ public Options includeDirectiveDefinitions(boolean flag) { this.includeScalars, this.includeSchemaDefinition, flag, + directive -> flag, + this.useAstDefinitions, + this.descriptionsAsHashComments, + this.includeDirective, + this.includeSchemaElement, + this.comparatorRegistry, + this.includeAstDefinitionComments); + } + + + /** + * This is a Predicate that decides whether a directive definition is printed. 
+ * + * @param includeDirectiveDefinition the predicate to decide of a directive defintion is printed + * + * @return new instance of options + */ + public Options includeDirectiveDefinition(Predicate includeDirectiveDefinition) { + return new Options(this.includeIntrospectionTypes, + this.includeScalars, + this.includeSchemaDefinition, + this.includeDirectiveDefinitions, + includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, this.includeDirective, @@ -280,6 +314,7 @@ public Options includeDirectives(boolean flag) { this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, directive -> flag, @@ -300,6 +335,7 @@ public Options includeDirectives(Predicate includeDirective) { this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, includeDirective, @@ -308,6 +344,7 @@ public Options includeDirectives(Predicate includeDirective) { this.includeAstDefinitionComments); } + /** * This is a general purpose Predicate that decides whether a schema element is printed ever. 
* @@ -321,6 +358,7 @@ public Options includeSchemaElement(Predicate includeSchem this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, this.includeDirective, @@ -342,6 +380,7 @@ public Options useAstDefinitions(boolean flag) { this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, flag, this.descriptionsAsHashComments, this.includeDirective, @@ -365,6 +404,7 @@ public Options descriptionsAsHashComments(boolean flag) { this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, flag, this.includeDirective, @@ -387,6 +427,7 @@ public Options setComparators(GraphqlTypeComparatorRegistry comparatorRegistry) this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, this.includeDirective, @@ -409,6 +450,7 @@ public Options includeAstDefinitionComments(boolean flag) { this.includeScalars, this.includeSchemaDefinition, this.includeDirectiveDefinitions, + this.includeDirectiveDefinition, this.useAstDefinitions, this.descriptionsAsHashComments, this.includeDirective, @@ -638,7 +680,9 @@ private SchemaElementPrinter unionPrinter() { private SchemaElementPrinter directivePrinter() { return (out, directive, visibility) -> { - if (options.isIncludeDirectiveDefinitions()) { + boolean isOnEver = options.isIncludeDirectiveDefinitions(); + boolean specificTest = options.getIncludeDirectiveDefinition().test(directive.getName()); + if (isOnEver && specificTest) { String s = directiveDefinition(directive); out.format("%s", s); out.print("\n\n"); @@ -876,7 +920,7 @@ String directivesString(Class parentType, boolea private String directivesString(Class parentType, List directives) { directives = 
directives.stream() // @deprecated is special - we always print it if something is deprecated - .filter(directive -> options.getIncludeDirective().test(directive.getName()) || isDeprecatedDirective(directive)) + .filter(directive -> options.getIncludeDirective().test(directive.getName())) .filter(options.getIncludeSchemaElement()) .collect(toList()); @@ -909,10 +953,7 @@ private String directiveString(GraphQLAppliedDirective directive) { return ""; } if (!options.getIncludeDirective().test(directive.getName())) { - // @deprecated is special - we always print it if something is deprecated - if (!isDeprecatedDirective(directive)) { - return ""; - } + return ""; } StringBuilder sb = new StringBuilder(); @@ -948,6 +989,13 @@ private String directiveString(GraphQLAppliedDirective directive) { return sb.toString(); } + private boolean isDeprecatedDirectiveAllowed() { + // we ask if the special deprecated directive, + // which can be programmatically on a type without an applied directive, + // should be printed or not + return options.getIncludeDirective().test(DeprecatedDirective.getName()); + } + private boolean isDeprecatedDirective(GraphQLAppliedDirective directive) { return directive.getName().equals(DeprecatedDirective.getName()); } @@ -960,14 +1008,14 @@ private boolean hasDeprecatedDirective(List directives) private List addDeprecatedDirectiveIfNeeded(GraphQLDirectiveContainer directiveContainer) { List directives = DirectivesUtil.toAppliedDirectives(directiveContainer); - if (!hasDeprecatedDirective(directives)) { + if (!hasDeprecatedDirective(directives) && isDeprecatedDirectiveAllowed()) { directives = new ArrayList<>(directives); - String reason = getDeprecationReason(directiveContainer); - GraphQLAppliedDirectiveArgument arg = GraphQLAppliedDirectiveArgument.newArgument() - .name("reason") - .valueProgrammatic(reason) - .type(GraphQLString) - .build(); + String reason = getDeprecationReason(directiveContainer); + GraphQLAppliedDirectiveArgument arg = 
GraphQLAppliedDirectiveArgument.newArgument() + .name("reason") + .valueProgrammatic(reason) + .type(GraphQLString) + .build(); GraphQLAppliedDirective directive = GraphQLAppliedDirective.newDirective() .name("deprecated") .argument(arg) @@ -1104,7 +1152,7 @@ private void printComments(PrintWriter out, Object graphQLType, String prefix) { if (options.isIncludeAstDefinitionComments()) { String commentsText = getAstDefinitionComments(graphQLType); if (!isNullOrEmpty(commentsText)) { - List lines = Arrays.asList(commentsText.split("\n") ); + List lines = Arrays.asList(commentsText.split("\n")); if (!lines.isEmpty()) { printMultiLineHashDescription(out, prefix, lines); } @@ -1179,7 +1227,7 @@ private String getAstDefinitionComments(Object commentHolder) { } private String comments(List comments) { - if ( comments == null || comments.isEmpty() ) { + if (comments == null || comments.isEmpty()) { return null; } String s = comments.stream().map(c -> c.getContent()).collect(joining("\n", "", "\n")); diff --git a/src/main/java/graphql/schema/idl/TypeRuntimeWiring.java b/src/main/java/graphql/schema/idl/TypeRuntimeWiring.java index 60bfe6f60c..3480a5d6b0 100644 --- a/src/main/java/graphql/schema/idl/TypeRuntimeWiring.java +++ b/src/main/java/graphql/schema/idl/TypeRuntimeWiring.java @@ -4,12 +4,15 @@ import graphql.schema.DataFetcher; import graphql.schema.GraphQLSchema; import graphql.schema.TypeResolver; +import graphql.schema.idl.errors.StrictModeWiringException; import java.util.LinkedHashMap; import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.UnaryOperator; import static graphql.Assert.assertNotNull; +import static java.lang.String.format; /** * A type runtime wiring is a specification of the data fetchers and possible type resolver for a given type name. 
@@ -18,6 +21,28 @@ */ @PublicApi public class TypeRuntimeWiring { + + private final static AtomicBoolean DEFAULT_STRICT_MODE = new AtomicBoolean(false); + + /** + * By default {@link TypeRuntimeWiring} builders are not in strict mode, but you can set a JVM wide value + * so that any created will be. + * + * @param strictMode the desired strict mode state + * + * @see Builder#strictMode() + */ + public static void setStrictModeJvmWide(boolean strictMode) { + DEFAULT_STRICT_MODE.set(strictMode); + } + + /** + * @return the current JVM wide state of strict mode + */ + public static boolean getStrictModeJvmWide() { + return DEFAULT_STRICT_MODE.get(); + } + private final String typeName; private final DataFetcher defaultDataFetcher; private final Map fieldDataFetchers; @@ -82,6 +107,7 @@ public static class Builder { private DataFetcher defaultDataFetcher; private TypeResolver typeResolver; private EnumValuesProvider enumValuesProvider; + private boolean strictMode = DEFAULT_STRICT_MODE.get(); /** * Sets the type name for this type wiring. You MUST set this. @@ -95,6 +121,17 @@ public Builder typeName(String typeName) { return this; } + /** + * This puts the builder into strict mode, so if things get defined twice, for example, it + * will throw a {@link StrictModeWiringException}. 
+ * + * @return this builder + */ + public Builder strictMode() { + this.strictMode = true; + return this; + } + /** * Adds a data fetcher for the current type to the specified field * @@ -106,6 +143,9 @@ public Builder typeName(String typeName) { public Builder dataFetcher(String fieldName, DataFetcher dataFetcher) { assertNotNull(dataFetcher, () -> "you must provide a data fetcher"); assertNotNull(fieldName, () -> "you must tell us what field"); + if (strictMode) { + assertFieldStrictly(fieldName); + } fieldDataFetchers.put(fieldName, dataFetcher); return this; } @@ -119,10 +159,21 @@ public Builder dataFetcher(String fieldName, DataFetcher dataFetcher) { */ public Builder dataFetchers(Map dataFetchersMap) { assertNotNull(dataFetchersMap, () -> "you must provide a data fetchers map"); + if (strictMode) { + dataFetchersMap.forEach((fieldName, df) -> { + assertFieldStrictly(fieldName); + }); + } fieldDataFetchers.putAll(dataFetchersMap); return this; } + private void assertFieldStrictly(String fieldName) { + if (fieldDataFetchers.containsKey(fieldName)) { + throw new StrictModeWiringException(format("The field %s already has a data fetcher defined", fieldName)); + } + } + /** * All fields in a type need a data fetcher of some sort and this method is called to provide the default data fetcher * that will be used for this type if no specific one has been provided per field. diff --git a/src/main/java/graphql/schema/idl/TypeUtil.java b/src/main/java/graphql/schema/idl/TypeUtil.java index e790cf4693..0189666bd4 100644 --- a/src/main/java/graphql/schema/idl/TypeUtil.java +++ b/src/main/java/graphql/schema/idl/TypeUtil.java @@ -17,18 +17,12 @@ public class TypeUtil { * @return the type in graphql SDL format, eg [typeName!]! 
*/ public static String simplePrint(Type type) { - StringBuilder sb = new StringBuilder(); if (isNonNull(type)) { - sb.append(simplePrint(unwrapOne(type))); - sb.append("!"); + return simplePrint(unwrapOne(type)) + "!"; } else if (isList(type)) { - sb.append("["); - sb.append(simplePrint(unwrapOne(type))); - sb.append("]"); - } else { - sb.append(((TypeName) type).getName()); + return "[" + simplePrint(unwrapOne(type)) + "]"; } - return sb.toString(); + return ((TypeName) type).getName(); } /** diff --git a/src/main/java/graphql/schema/idl/errors/StrictModeWiringException.java b/src/main/java/graphql/schema/idl/errors/StrictModeWiringException.java new file mode 100644 index 0000000000..6da6b637fc --- /dev/null +++ b/src/main/java/graphql/schema/idl/errors/StrictModeWiringException.java @@ -0,0 +1,17 @@ +package graphql.schema.idl.errors; + +import graphql.GraphQLException; +import graphql.PublicApi; +import graphql.schema.idl.RuntimeWiring; +import graphql.schema.idl.TypeRuntimeWiring; + +/** + * An exception that is throw when {@link RuntimeWiring.Builder#strictMode()} or {@link TypeRuntimeWiring.Builder#strictMode()} is true and + * something gets redefined. 
+ */ +@PublicApi +public class StrictModeWiringException extends GraphQLException { + public StrictModeWiringException(String msg) { + super(msg); + } +} diff --git a/src/main/java/graphql/schema/validation/AppliedDirectiveArgumentsAreValid.java b/src/main/java/graphql/schema/validation/AppliedDirectiveArgumentsAreValid.java index 82e59e2785..1eec8bff75 100644 --- a/src/main/java/graphql/schema/validation/AppliedDirectiveArgumentsAreValid.java +++ b/src/main/java/graphql/schema/validation/AppliedDirectiveArgumentsAreValid.java @@ -6,11 +6,14 @@ import graphql.execution.ValuesResolver; import graphql.language.Value; import graphql.schema.CoercingParseValueException; +import graphql.schema.GraphQLAppliedDirective; +import graphql.schema.GraphQLAppliedDirectiveArgument; import graphql.schema.GraphQLArgument; import graphql.schema.GraphQLDirective; import graphql.schema.GraphQLInputType; import graphql.schema.GraphQLSchema; import graphql.schema.GraphQLSchemaElement; +import graphql.schema.GraphQLTypeUtil; import graphql.schema.GraphQLTypeVisitorStub; import graphql.schema.InputValueWithState; import graphql.util.TraversalControl; @@ -32,29 +35,56 @@ public TraversalControl visitGraphQLDirective(GraphQLDirective directive, Traver // if there is no parent it means it is just a directive definition and not an applied directive if (context.getParentNode() != null) { for (GraphQLArgument graphQLArgument : directive.getArguments()) { - checkArgument(directive, graphQLArgument, context); + checkArgument( + directive.getName(), + graphQLArgument.getName(), + graphQLArgument.getArgumentValue(), + graphQLArgument.getType(), + context + ); } } return TraversalControl.CONTINUE; } - private void checkArgument(GraphQLDirective directive, GraphQLArgument argument, TraverserContext context) { - if (!argument.hasSetValue()) { - return; + @Override + public TraversalControl visitGraphQLAppliedDirective(GraphQLAppliedDirective directive, TraverserContext context) { + // if there is no 
parent it means it is just a directive definition and not an applied directive + if (context.getParentNode() != null) { + for (GraphQLAppliedDirectiveArgument graphQLArgument : directive.getArguments()) { + checkArgument( + directive.getName(), + graphQLArgument.getName(), + graphQLArgument.getArgumentValue(), + graphQLArgument.getType(), + context + ); + } } + return TraversalControl.CONTINUE; + } + + private void checkArgument( + String directiveName, + String argumentName, + InputValueWithState argumentValue, + GraphQLInputType argumentType, + TraverserContext context + ) { GraphQLSchema schema = context.getVarFromParents(GraphQLSchema.class); SchemaValidationErrorCollector errorCollector = context.getVarFromParents(SchemaValidationErrorCollector.class); - InputValueWithState argumentValue = argument.getArgumentValue(); boolean invalid = false; if (argumentValue.isLiteral() && - !validationUtil.isValidLiteralValue((Value) argumentValue.getValue(), argument.getType(), schema, GraphQLContext.getDefault(), Locale.getDefault())) { + !validationUtil.isValidLiteralValue((Value) argumentValue.getValue(), argumentType, schema, GraphQLContext.getDefault(), Locale.getDefault())) { invalid = true; } else if (argumentValue.isExternal() && - !isValidExternalValue(schema, argumentValue.getValue(), argument.getType(), GraphQLContext.getDefault(), Locale.getDefault())) { + !isValidExternalValue(schema, argumentValue.getValue(), argumentType, GraphQLContext.getDefault(), Locale.getDefault())) { + invalid = true; + } else if (argumentValue.isNotSet() && GraphQLTypeUtil.isNonNull(argumentType)) { invalid = true; } if (invalid) { - String message = format("Invalid argument '%s' for applied directive of name '%s'", argument.getName(), directive.getName()); + String message = format("Invalid argument '%s' for applied directive of name '%s'", argumentName, directiveName); errorCollector.addError(new SchemaValidationError(SchemaValidationErrorType.InvalidAppliedDirectiveArgument, 
message)); } } diff --git a/src/main/java/graphql/schema/visibility/NoIntrospectionGraphqlFieldVisibility.java b/src/main/java/graphql/schema/visibility/NoIntrospectionGraphqlFieldVisibility.java index 604e794114..62aa16bbe0 100644 --- a/src/main/java/graphql/schema/visibility/NoIntrospectionGraphqlFieldVisibility.java +++ b/src/main/java/graphql/schema/visibility/NoIntrospectionGraphqlFieldVisibility.java @@ -12,10 +12,15 @@ * This field visibility will prevent Introspection queries from being performed. Technically this puts your * system in contravention of the specification * but some production systems want this lock down in place. + * + * @deprecated This is no longer the best way to prevent Introspection - {@link graphql.introspection.Introspection#enabledJvmWide(boolean)} + * can be used instead */ @PublicApi +@Deprecated(since = "2024-03-16") public class NoIntrospectionGraphqlFieldVisibility implements GraphqlFieldVisibility { + @Deprecated(since = "2024-03-16") public static NoIntrospectionGraphqlFieldVisibility NO_INTROSPECTION_FIELD_VISIBILITY = new NoIntrospectionGraphqlFieldVisibility(); diff --git a/src/main/java/graphql/util/Anonymizer.java b/src/main/java/graphql/util/Anonymizer.java index 34553d8777..2d0497eca6 100644 --- a/src/main/java/graphql/util/Anonymizer.java +++ b/src/main/java/graphql/util/Anonymizer.java @@ -100,7 +100,6 @@ import static graphql.schema.idl.SchemaGenerator.createdMockedSchema; import static graphql.util.TraversalControl.CONTINUE; import static graphql.util.TreeTransformerUtil.changeNode; -import static java.lang.String.format; /** * Util class which converts schemas and optionally queries @@ -735,14 +734,14 @@ public void visitField(QueryVisitorFieldEnvironment env) { List directives = field.getDirectives(); for (Directive directive : directives) { // this is a directive definition - GraphQLDirective directiveDefinition = assertNotNull(schema.getDirective(directive.getName()), () -> format("%s directive definition not 
found ", directive.getName())); + GraphQLDirective directiveDefinition = assertNotNull(schema.getDirective(directive.getName()), "%s directive definition not found ", directive.getName()); String directiveName = directiveDefinition.getName(); - String newDirectiveName = assertNotNull(newNames.get(directiveDefinition), () -> format("No new name found for directive %s", directiveName)); + String newDirectiveName = assertNotNull(newNames.get(directiveDefinition), "No new name found for directive %s", directiveName); astNodeToNewName.put(directive, newDirectiveName); for (Argument argument : directive.getArguments()) { GraphQLArgument argumentDefinition = directiveDefinition.getArgument(argument.getName()); - String newArgumentName = assertNotNull(newNames.get(argumentDefinition), () -> format("No new name found for directive %s argument %s", directiveName, argument.getName())); + String newArgumentName = assertNotNull(newNames.get(argumentDefinition), "No new name found for directive %s argument %s", directiveName, argument.getName()); astNodeToNewName.put(argument, newArgumentName); visitDirectiveArgumentValues(directive, argument.getValue()); } @@ -865,7 +864,7 @@ public TraversalControl visitVariableDefinition(VariableDefinition node, Travers @Override public TraversalControl visitVariableReference(VariableReference node, TraverserContext context) { - String newName = assertNotNull(variableNames.get(node.getName()), () -> format("No new variable name found for %s", node.getName())); + String newName = assertNotNull(variableNames.get(node.getName()), "No new variable name found for %s", node.getName()); return changeNode(context, node.transform(builder -> builder.name(newName))); } @@ -916,7 +915,7 @@ private static GraphQLType fromTypeToGraphQLType(Type type, GraphQLSchema schema if (type instanceof TypeName) { String typeName = ((TypeName) type).getName(); GraphQLType graphQLType = schema.getType(typeName); - graphql.Assert.assertNotNull(graphQLType, () -> "Schema 
must contain type " + typeName); + graphql.Assert.assertNotNull(graphQLType, "Schema must contain type %s", typeName); return graphQLType; } else if (type instanceof NonNullType) { return GraphQLNonNull.nonNull(fromTypeToGraphQLType(TypeUtil.unwrapOne(type), schema)); diff --git a/src/main/java/graphql/util/CyclicSchemaAnalyzer.java b/src/main/java/graphql/util/CyclicSchemaAnalyzer.java new file mode 100644 index 0000000000..069dba0783 --- /dev/null +++ b/src/main/java/graphql/util/CyclicSchemaAnalyzer.java @@ -0,0 +1,372 @@ +package graphql.util; + +import graphql.Assert; +import graphql.ExperimentalApi; +import graphql.introspection.Introspection; +import graphql.schema.GraphQLSchema; +import graphql.schema.diffing.Edge; +import graphql.schema.diffing.SchemaGraph; +import graphql.schema.diffing.SchemaGraphFactory; +import graphql.schema.diffing.Vertex; + +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Finds all cycles in a GraphQL Schema. + * Cycles caused by built-in introspection types are filtered out. 
+ */ +@ExperimentalApi +public class CyclicSchemaAnalyzer { + + public static class SchemaCycle { + private final List cycle; + + public SchemaCycle(List cycle) { + this.cycle = cycle; + } + + public int size() { + return cycle.size(); + } + + public List getCycle() { + return cycle; + } + + @Override + public String toString() { + return cycle.toString(); + } + } + + public static List findCycles(GraphQLSchema schema) { + return findCycles(schema, true); + } + + public static List findCycles(GraphQLSchema schema, boolean filterOutIntrospectionCycles) { + FindCyclesImpl findCyclesImpl = new FindCyclesImpl(schema); + findCyclesImpl.findAllSimpleCyclesImpl(); + List> vertexCycles = findCyclesImpl.foundCycles; + if (filterOutIntrospectionCycles) { + vertexCycles = vertexCycles.stream().filter(vertices -> { + for (Vertex vertex : vertices) { + if (Introspection.isIntrospectionTypes(vertex.getName())) { + return false; + } + } + return true; + }).collect(Collectors.toList()); + } + List result = new ArrayList<>(); + for (List vertexCycle : vertexCycles) { + List stringCycle = new ArrayList<>(); + for (Vertex vertex : vertexCycle) { + if (vertex.isOfType(SchemaGraph.OBJECT) || vertex.isOfType(SchemaGraph.INTERFACE) || vertex.isOfType(SchemaGraph.UNION)) { + stringCycle.add(vertex.getName()); + } else if (vertex.isOfType(SchemaGraph.FIELD)) { + String fieldsContainerName = findCyclesImpl.graph.getFieldsContainerForField(vertex).getName(); + stringCycle.add(fieldsContainerName + "." + vertex.getName()); + } else if (vertex.isOfType(SchemaGraph.INPUT_OBJECT)) { + stringCycle.add(vertex.getName()); + } else if (vertex.isOfType(SchemaGraph.INPUT_FIELD)) { + String inputFieldsContainerName = findCyclesImpl.graph.getFieldsContainerForField(vertex).getName(); + stringCycle.add(inputFieldsContainerName + "." 
+ vertex.getName()); + } else { + Assert.assertShouldNeverHappen("unexpected vertex in cycle found: " + vertex); + } + } + result.add(new SchemaCycle(stringCycle)); + } + return result; + } + + private static class GraphAndIndex { + final SchemaGraph graph; + final int index; + + public GraphAndIndex(SchemaGraph graph, int index) { + this.graph = graph; + this.index = index; + } + } + + /** + * This code was originally taken from https://github.com/jgrapht/jgrapht/blob/master/jgrapht-core/src/main/java/org/jgrapht/alg/cycle/JohnsonSimpleCycles.java + * * (C) Copyright 2013-2023, by Nikolay Ognyanov and Contributors. + * * + * * JGraphT : a free Java graph-theory library + * * + * * See the CONTRIBUTORS.md file distributed with this work for additional + * * information regarding copyright ownership. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License 2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0, or the + * * GNU Lesser General Public License v2.1 or later + * * which is available at + * * http://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html. + * * + * * SPDX-License-Identifier: EPL-2.0 OR LGPL-2.1-or-later + */ + private static class FindCyclesImpl { + + private final GraphQLSchema schema; + private final SchemaGraph graph; + + // The main state of the algorithm. + private Vertex[] iToV = null; + private Map vToI = null; + private Set blocked = null; + private Map> bSets = null; + private ArrayDeque stack = null; + + // The state of the embedded Tarjan SCC algorithm. 
+ private List> foundSCCs = null; + private int index = 0; + private Map vIndex = null; + private Map vLowlink = null; + private ArrayDeque path = null; + private Set pathSet = null; + + private List> foundCycles = new ArrayList<>(); + + public FindCyclesImpl(GraphQLSchema schema) { + this.schema = schema; + SchemaGraphFactory schemaGraphFactory = new SchemaGraphFactory(); + this.graph = schemaGraphFactory.createGraph(schema); + iToV = (Vertex[]) graph.getVertices().toArray(new Vertex[0]); + vToI = new LinkedHashMap<>(); + blocked = new LinkedHashSet<>(); + bSets = new LinkedHashMap<>(); + stack = new ArrayDeque<>(); + + for (int i = 0; i < iToV.length; i++) { + vToI.put(iToV[i], i); + } + } + + public List> findAllSimpleCyclesImpl() { + int startIndex = 0; + + int size = graph.getVertices().size(); + while (startIndex < size) { + GraphAndIndex minSCCGResult = findMinSCSG(startIndex); + if (minSCCGResult != null) { + startIndex = minSCCGResult.index; + SchemaGraph scg = minSCCGResult.graph; + Vertex startV = toV(startIndex); + for (Edge e : scg.getAdjacentEdges(startV)) { + Vertex v = e.getTo(); + blocked.remove(v); + getBSet(v).clear(); + } + findCyclesInSCG(startIndex, startIndex, scg); + startIndex++; + } else { + break; + } + } + return this.foundCycles; + } + + private GraphAndIndex findMinSCSG(int startIndex) { + /* + * Per Johnson : "adjacency structure of strong component $K$ with least vertex in subgraph + * of $G$ induced by $(s, s + 1, n)$". Or in contemporary terms: the strongly connected + * component of the subgraph induced by $(v_1, \dotso ,v_n)$ which contains the minimum + * (among those SCCs) vertex index. We return that index together with the graph. 
+ */ + initMinSCGState(); + + List> foundSCCs = findSCCS(startIndex); + + // find the SCC with the minimum index + int minIndexFound = Integer.MAX_VALUE; + Set minSCC = null; + for (Set scc : foundSCCs) { + for (Vertex v : scc) { + int t = toI(v); + if (t < minIndexFound) { + minIndexFound = t; + minSCC = scc; + } + } + } + if (minSCC == null) { + return null; + } + + // build a graph for the SCC found + SchemaGraph resultGraph = new SchemaGraph(); + for (Vertex v : minSCC) { + resultGraph.addVertex(v); + } + for (Vertex v : minSCC) { + for (Vertex w : minSCC) { + Edge edge = graph.getEdge(v, w); + if (edge != null) { + resultGraph.addEdge(edge); + } + } + } + + GraphAndIndex graphAndIndex = new GraphAndIndex(resultGraph, minIndexFound); + clearMinSCCState(); + return graphAndIndex; + } + + private List> findSCCS(int startIndex) { + // Find SCCs in the subgraph induced + // by vertices startIndex and beyond. + // A call to StrongConnectivityAlgorithm + // would be too expensive because of the + // need to materialize the subgraph. + // So - do a local search by the Tarjan's + // algorithm and pretend that vertices + // with an index smaller than startIndex + // do not exist. 
+ for (Vertex v : graph.getVertices()) { + int vI = toI(v); + if (vI < startIndex) { + continue; + } + if (!vIndex.containsKey(v)) { + getSCCs(startIndex, vI); + } + } + List> result = foundSCCs; + foundSCCs = null; + return result; + } + + private void getSCCs(int startIndex, int vertexIndex) { + Vertex vertex = toV(vertexIndex); + vIndex.put(vertex, index); + vLowlink.put(vertex, index); + index++; + path.push(vertex); + pathSet.add(vertex); + + List edges = graph.getAdjacentEdges(vertex); + for (Edge e : edges) { + Vertex successor = e.getTo(); + int successorIndex = toI(successor); + if (successorIndex < startIndex) { + continue; + } + if (!vIndex.containsKey(successor)) { + getSCCs(startIndex, successorIndex); + vLowlink.put(vertex, Math.min(vLowlink.get(vertex), vLowlink.get(successor))); + } else if (pathSet.contains(successor)) { + vLowlink.put(vertex, Math.min(vLowlink.get(vertex), vIndex.get(successor))); + } + } + if (vLowlink.get(vertex).equals(vIndex.get(vertex))) { + Set result = new LinkedHashSet<>(); + Vertex temp; + do { + temp = path.pop(); + pathSet.remove(temp); + result.add(temp); + } while (!vertex.equals(temp)); + if (result.size() == 1) { + Vertex v = result.iterator().next(); + if (graph.containsEdge(vertex, v)) { + foundSCCs.add(result); + } + } else { + foundSCCs.add(result); + } + } + } + + private boolean findCyclesInSCG(int startIndex, int vertexIndex, SchemaGraph scg) { + /* + * Find cycles in a strongly connected graph per Johnson. 
+ */ + boolean foundCycle = false; + Vertex vertex = toV(vertexIndex); + stack.push(vertex); + blocked.add(vertex); + + for (Edge e : scg.getAdjacentEdges(vertex)) { + Vertex successor = e.getTo(); + int successorIndex = toI(successor); + if (successorIndex == startIndex) { + List cycle = new ArrayList<>(stack.size()); + stack.descendingIterator().forEachRemaining(cycle::add); + this.foundCycles.add(cycle); + foundCycle = true; + } else if (!blocked.contains(successor)) { + boolean gotCycle = findCyclesInSCG(startIndex, successorIndex, scg); + foundCycle = foundCycle || gotCycle; + } + } + if (foundCycle) { + unblock(vertex); + } else { + for (Edge ew : scg.getAdjacentEdges(vertex)) { + Vertex w = ew.getTo(); + Set bSet = getBSet(w); + bSet.add(vertex); + } + } + stack.pop(); + return foundCycle; + } + + private void unblock(Vertex vertex) { + blocked.remove(vertex); + Set bSet = getBSet(vertex); + while (bSet.size() > 0) { + Vertex w = bSet.iterator().next(); + bSet.remove(w); + if (blocked.contains(w)) { + unblock(w); + } + } + } + + + private void initMinSCGState() { + index = 0; + foundSCCs = new ArrayList<>(); + vIndex = new LinkedHashMap<>(); + vLowlink = new LinkedHashMap<>(); + path = new ArrayDeque<>(); + pathSet = new LinkedHashSet<>(); + } + + private void clearMinSCCState() { + index = 0; + foundSCCs = null; + vIndex = null; + vLowlink = null; + path = null; + pathSet = null; + } + + private Integer toI(Vertex vertex) { + return vToI.get(vertex); + } + + private Vertex toV(Integer i) { + return iToV[i]; + } + + private Set getBSet(Vertex v) { + // B sets typically not all needed, + // so instantiate lazily. 
+ return bSets.computeIfAbsent(v, k -> new LinkedHashSet<>()); + } + + + } +} diff --git a/src/main/java/graphql/util/IdGenerator.java b/src/main/java/graphql/util/IdGenerator.java new file mode 100644 index 0000000000..f92784c4ac --- /dev/null +++ b/src/main/java/graphql/util/IdGenerator.java @@ -0,0 +1,75 @@ +/* + * Copyright 2002-2015 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* + * This class was taken from Spring https://github.com/spring-projects/spring-framework/blob/main/spring-core/src/main/java/org/springframework/util/AlternativeJdkIdGenerator.java + * as a way to get a more performant UUID generator for use as request ids. SecureRandom can be expensive + * to run on each request as per https://github.com/graphql-java/graphql-java/issues/3435, so this uses SecureRandom + * at application start and then the cheaper Random class each call after that. + */ +package graphql.util; + +import graphql.Internal; + +import java.math.BigInteger; +import java.security.SecureRandom; +import java.util.Random; +import java.util.UUID; + +/** + * An id generator that uses {@link SecureRandom} for the initial seed and + * {@link Random} thereafter. This provides a better balance between securely random ids and performance. 
+ * + * @author Rossen Stoyanchev + * @author Rob Winch + */ +@Internal +public class IdGenerator { + + private static final IdGenerator idGenerator = new IdGenerator(); + + public static UUID uuid() { + return idGenerator.generateId(); + } + + private final Random random; + + + public IdGenerator() { + SecureRandom secureRandom = new SecureRandom(); + byte[] seed = new byte[8]; + secureRandom.nextBytes(seed); + this.random = new Random(new BigInteger(seed).longValue()); + } + + + public UUID generateId() { + byte[] randomBytes = new byte[16]; + this.random.nextBytes(randomBytes); + + long mostSigBits = 0; + for (int i = 0; i < 8; i++) { + mostSigBits = (mostSigBits << 8) | (randomBytes[i] & 0xff); + } + + long leastSigBits = 0; + for (int i = 8; i < 16; i++) { + leastSigBits = (leastSigBits << 8) | (randomBytes[i] & 0xff); + } + + return new UUID(mostSigBits, leastSigBits); + } + +} \ No newline at end of file diff --git a/src/main/java/graphql/util/Interning.java b/src/main/java/graphql/util/Interning.java new file mode 100644 index 0000000000..bc51b3ebff --- /dev/null +++ b/src/main/java/graphql/util/Interning.java @@ -0,0 +1,25 @@ +package graphql.util; + +import com.google.common.collect.Interner; +import com.google.common.collect.Interners; +import graphql.Internal; +import org.jetbrains.annotations.NotNull; + +/** + * Interner allowing object-identity comparison of key entities like field names. This is useful on hotspot + * areas like the engine where we look up field names a lot inside maps, and those maps use object identity first + * inside the key lookup code. 
+ */ +@Internal +public class Interning { + + private Interning() { + } + + private static final Interner INTERNER = Interners.newWeakInterner(); + + public static @NotNull String intern(@NotNull String name) { + return INTERNER.intern(name); + } + +} diff --git a/src/main/java/graphql/util/LogKit.java b/src/main/java/graphql/util/LogKit.java deleted file mode 100644 index df65060e6b..0000000000 --- a/src/main/java/graphql/util/LogKit.java +++ /dev/null @@ -1,22 +0,0 @@ -package graphql.util; - -import graphql.Internal; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Internal -public class LogKit { - - /** - * Creates a logger with a name indicating that the content might not be privacy safe - * eg it could contain user generated content or privacy information. - * - * @param clazz the class to make a logger for - * - * @return a new Logger - */ - public static Logger getNotPrivacySafeLogger(Class clazz) { - return LoggerFactory.getLogger(String.format("notprivacysafe.%s", clazz.getName())); - } - -} diff --git a/src/main/java/graphql/util/StringKit.java b/src/main/java/graphql/util/StringKit.java new file mode 100644 index 0000000000..20fcea1b1f --- /dev/null +++ b/src/main/java/graphql/util/StringKit.java @@ -0,0 +1,20 @@ +package graphql.util; + +import java.util.Locale; + +public class StringKit { + + public static String capitalize(String s) { + if (s != null && !s.isEmpty()) { + StringBuilder sb = new StringBuilder(); + // see https://github.com/graphql-java/graphql-java/issues/3385 + sb.append(s.substring(0, 1).toUpperCase(Locale.ROOT)); + if (s.length() > 1) { + sb.append(s.substring(1)); + } + return sb.toString(); + } + return s; + } + +} diff --git a/src/main/java/graphql/util/TraverserState.java b/src/main/java/graphql/util/TraverserState.java index e44058f914..e478e05a0b 100644 --- a/src/main/java/graphql/util/TraverserState.java +++ b/src/main/java/graphql/util/TraverserState.java @@ -44,7 +44,7 @@ public void pushAll(TraverserContext 
traverserContext, Function { List children = childrenMap.get(key); for (int i = children.size() - 1; i >= 0; i--) { - U child = assertNotNull(children.get(i), () -> "null child for key " + key); + U child = assertNotNull(children.get(i), "null child for key %s",key); NodeLocation nodeLocation = new NodeLocation(key, i); DefaultTraverserContext context = super.newContext(child, traverserContext, nodeLocation); super.state.push(context); @@ -72,7 +72,7 @@ public void pushAll(TraverserContext traverserContext, Function { List children = childrenMap.get(key); for (int i = 0; i < children.size(); i++) { - U child = assertNotNull(children.get(i), () -> "null child for key " + key); + U child = assertNotNull(children.get(i), "null child for key %s",key); NodeLocation nodeLocation = new NodeLocation(key, i); DefaultTraverserContext context = super.newContext(child, traverserContext, nodeLocation); childrenContextMap.computeIfAbsent(key, notUsed -> new ArrayList<>()); diff --git a/src/main/java/graphql/util/TreeParallelTransformer.java b/src/main/java/graphql/util/TreeParallelTransformer.java index 0102af2eff..3003ac3772 100644 --- a/src/main/java/graphql/util/TreeParallelTransformer.java +++ b/src/main/java/graphql/util/TreeParallelTransformer.java @@ -223,7 +223,7 @@ private List pushAll(TraverserContext traverserConte childrenMap.keySet().forEach(key -> { List children = childrenMap.get(key); for (int i = children.size() - 1; i >= 0; i--) { - T child = assertNotNull(children.get(i), () -> String.format("null child for key %s", key)); + T child = assertNotNull(children.get(i), "null child for key %s", key); NodeLocation nodeLocation = new NodeLocation(key, i); DefaultTraverserContext context = newContext(child, traverserContext, nodeLocation); contexts.push(context); diff --git a/src/main/java/graphql/util/TreeParallelTraverser.java b/src/main/java/graphql/util/TreeParallelTraverser.java index db7f27dbaa..75a903be13 100644 --- 
a/src/main/java/graphql/util/TreeParallelTraverser.java +++ b/src/main/java/graphql/util/TreeParallelTraverser.java @@ -162,7 +162,7 @@ private List pushAll(TraverserContext traverserConte childrenMap.keySet().forEach(key -> { List children = childrenMap.get(key); for (int i = children.size() - 1; i >= 0; i--) { - T child = assertNotNull(children.get(i), () -> String.format("null child for key %s", key)); + T child = assertNotNull(children.get(i), "null child for key %s", key); NodeLocation nodeLocation = new NodeLocation(key, i); DefaultTraverserContext context = newContext(child, traverserContext, nodeLocation); contexts.push(context); diff --git a/src/main/java/graphql/validation/AbstractRule.java b/src/main/java/graphql/validation/AbstractRule.java index c5c2f5a56a..0cc31edc75 100644 --- a/src/main/java/graphql/validation/AbstractRule.java +++ b/src/main/java/graphql/validation/AbstractRule.java @@ -1,6 +1,7 @@ package graphql.validation; +import graphql.ExperimentalApi; import graphql.Internal; import graphql.i18n.I18nMsg; import graphql.language.Argument; @@ -90,6 +91,17 @@ protected List getQueryPath() { return validationContext.getQueryPath(); } + /** + * Verifies if the experimental API key is enabled + * @param key to be checked + * @return if the experimental API key is enabled + */ + protected Boolean isExperimentalApiKeyEnabled(String key) { + return (getValidationContext() != null && + getValidationContext().getGraphQLContext() != null || + getValidationContext().getGraphQLContext().get(key) != null || + ((Boolean) getValidationContext().getGraphQLContext().get(key))); + } /** * Creates an I18n message using the {@link graphql.i18n.I18nMsg} * diff --git a/src/main/java/graphql/validation/TraversalContext.java b/src/main/java/graphql/validation/TraversalContext.java index 93bbdb62e6..d9d93af7f1 100644 --- a/src/main/java/graphql/validation/TraversalContext.java +++ b/src/main/java/graphql/validation/TraversalContext.java @@ -178,7 +178,7 @@ private 
void enterImpl(ObjectField objectField) { GraphQLInputObjectField inputField = null; if (objectType instanceof GraphQLInputObjectType) { GraphQLInputObjectType inputObjectType = (GraphQLInputObjectType) objectType; - inputField = schema.getFieldVisibility().getFieldDefinition(inputObjectType, objectField.getName()); + inputField = schema.getCodeRegistry().getFieldVisibility().getFieldDefinition(inputObjectType, objectField.getName()); if (inputField != null) { inputType = inputField.getType(); } @@ -337,7 +337,7 @@ private GraphQLFieldDefinition getFieldDef(GraphQLSchema schema, GraphQLType par return schema.getIntrospectionTypenameFieldDefinition(); } if (parentType instanceof GraphQLFieldsContainer) { - return schema.getFieldVisibility().getFieldDefinition((GraphQLFieldsContainer) parentType, field.getName()); + return schema.getCodeRegistry().getFieldVisibility().getFieldDefinition((GraphQLFieldsContainer) parentType, field.getName()); } return null; } diff --git a/src/main/java/graphql/validation/ValidationError.java b/src/main/java/graphql/validation/ValidationError.java index 841db1c17a..04c1f88936 100644 --- a/src/main/java/graphql/validation/ValidationError.java +++ b/src/main/java/graphql/validation/ValidationError.java @@ -2,7 +2,6 @@ import com.google.common.collect.ImmutableMap; -import graphql.DeprecatedAt; import graphql.ErrorType; import graphql.GraphQLError; import graphql.GraphqlErrorHelper; @@ -24,51 +23,6 @@ public class ValidationError implements GraphQLError { private final List queryPath = new ArrayList<>(); private final ImmutableMap extensions; - @Deprecated - @DeprecatedAt("2022-07-10") - public ValidationError(ValidationErrorClassification validationErrorType) { - this(newValidationError() - .validationErrorType(validationErrorType)); - } - - @Deprecated - @DeprecatedAt("2022-07-10") - public ValidationError(ValidationErrorClassification validationErrorType, SourceLocation sourceLocation, String description) { - this(newValidationError() - 
.validationErrorType(validationErrorType) - .sourceLocation(sourceLocation) - .description(description)); - } - - @Deprecated - @DeprecatedAt("2022-07-10") - public ValidationError(ValidationErrorType validationErrorType, SourceLocation sourceLocation, String description, List queryPath) { - this(newValidationError() - .validationErrorType(validationErrorType) - .sourceLocation(sourceLocation) - .description(description) - .queryPath(queryPath)); - } - - @Deprecated - @DeprecatedAt("2022-07-10") - public ValidationError(ValidationErrorType validationErrorType, List sourceLocations, String description) { - this(newValidationError() - .validationErrorType(validationErrorType) - .sourceLocations(sourceLocations) - .description(description)); - } - - @Deprecated - @DeprecatedAt("2022-07-10") - public ValidationError(ValidationErrorType validationErrorType, List sourceLocations, String description, List queryPath) { - this(newValidationError() - .validationErrorType(validationErrorType) - .sourceLocations(sourceLocations) - .description(description) - .queryPath(queryPath)); - } - private ValidationError(Builder builder) { this.validationErrorType = builder.validationErrorType; this.description = builder.description; diff --git a/src/main/java/graphql/validation/ValidationErrorType.java b/src/main/java/graphql/validation/ValidationErrorType.java index 5ae5be0aaf..5710a1b0b9 100644 --- a/src/main/java/graphql/validation/ValidationErrorType.java +++ b/src/main/java/graphql/validation/ValidationErrorType.java @@ -27,6 +27,7 @@ public enum ValidationErrorType implements ValidationErrorClassification { UnknownDirective, MisplacedDirective, UndefinedVariable, + VariableNotAllowed, UnusedVariable, FragmentCycle, FieldsConflict, @@ -37,6 +38,7 @@ public enum ValidationErrorType implements ValidationErrorClassification { DuplicateFragmentName, DuplicateDirectiveName, DuplicateArgumentNames, + DuplicateIncrementalLabel, DuplicateVariableName, NullValueForNonNullArgument, 
SubscriptionMultipleRootFields, diff --git a/src/main/java/graphql/validation/Validator.java b/src/main/java/graphql/validation/Validator.java index 54558c617a..d7c3db2fdc 100644 --- a/src/main/java/graphql/validation/Validator.java +++ b/src/main/java/graphql/validation/Validator.java @@ -1,11 +1,15 @@ package graphql.validation; +import graphql.ExperimentalApi; import graphql.Internal; import graphql.i18n.I18n; import graphql.language.Document; import graphql.schema.GraphQLSchema; import graphql.validation.rules.ArgumentsOfCorrectType; +import graphql.validation.rules.DeferDirectiveLabel; +import graphql.validation.rules.DeferDirectiveOnRootLevel; +import graphql.validation.rules.DeferDirectiveOnValidOperation; import graphql.validation.rules.UniqueObjectFieldName; import graphql.validation.rules.ExecutableDefinitions; import graphql.validation.rules.FieldsOnCorrectType; @@ -157,6 +161,14 @@ public List createRules(ValidationContext validationContext, Valid UniqueObjectFieldName uniqueObjectFieldName = new UniqueObjectFieldName(validationContext, validationErrorCollector); rules.add(uniqueObjectFieldName); + DeferDirectiveOnRootLevel deferDirectiveOnRootLevel = new DeferDirectiveOnRootLevel(validationContext, validationErrorCollector); + rules.add(deferDirectiveOnRootLevel); + + DeferDirectiveOnValidOperation deferDirectiveOnValidOperation = new DeferDirectiveOnValidOperation(validationContext, validationErrorCollector); + rules.add(deferDirectiveOnValidOperation); + + DeferDirectiveLabel deferDirectiveLabel = new DeferDirectiveLabel(validationContext, validationErrorCollector); + rules.add(deferDirectiveLabel); return rules; } } diff --git a/src/main/java/graphql/validation/rules/DeferDirectiveLabel.java b/src/main/java/graphql/validation/rules/DeferDirectiveLabel.java new file mode 100644 index 0000000000..03a962d3d6 --- /dev/null +++ b/src/main/java/graphql/validation/rules/DeferDirectiveLabel.java @@ -0,0 +1,68 @@ +package graphql.validation.rules; + +import 
graphql.Directives; +import graphql.ExperimentalApi; +import graphql.language.Argument; +import graphql.language.Directive; +import graphql.language.Node; +import graphql.language.NullValue; +import graphql.language.StringValue; +import graphql.language.Value; +import graphql.validation.AbstractRule; +import graphql.validation.ValidationContext; +import graphql.validation.ValidationErrorCollector; + +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +import static graphql.validation.ValidationErrorType.DuplicateArgumentNames; +import static graphql.validation.ValidationErrorType.DuplicateIncrementalLabel; +import static graphql.validation.ValidationErrorType.VariableNotAllowed; +import static graphql.validation.ValidationErrorType.WrongType; + +/** + * Defer and stream directive labels are unique + * + * A GraphQL document is only valid if defer and stream directives' label argument is static and unique. + * + * See proposed spec:spec/Section 5 -- Validation.md ### ### Defer And Stream Directive Labels Are Unique + */ +@ExperimentalApi +public class DeferDirectiveLabel extends AbstractRule { + private Set checkedLabels = new LinkedHashSet<>(); + public DeferDirectiveLabel(ValidationContext validationContext, ValidationErrorCollector validationErrorCollector) { + super(validationContext, validationErrorCollector); + } + + @Override + public void checkDirective(Directive directive, List ancestors) { + // ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT must be true + if (!isExperimentalApiKeyEnabled(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT) || + !Directives.DeferDirective.getName().equals(directive.getName()) || + directive.getArguments().size() == 0) { + return; + } + + Argument labelArgument = directive.getArgument("label"); + if (labelArgument == null || labelArgument.getValue() instanceof NullValue){ + return; + } + Value labelArgumentValue = labelArgument.getValue(); + + if (!(labelArgumentValue instanceof StringValue)) { + String 
message = i18n(WrongType, "DeferDirective.labelMustBeStaticString"); + addError(WrongType, directive.getSourceLocation(), message); + } else { + if (checkedLabels.contains(((StringValue) labelArgumentValue).getValue())) { + String message = i18n(DuplicateIncrementalLabel, "IncrementalDirective.uniqueArgument", labelArgument.getName(), directive.getName()); + addError(DuplicateIncrementalLabel, directive.getSourceLocation(), message); + } else { + checkedLabels.add(((StringValue) labelArgumentValue).getValue()); + } + } + } + + + +} \ No newline at end of file diff --git a/src/main/java/graphql/validation/rules/DeferDirectiveOnRootLevel.java b/src/main/java/graphql/validation/rules/DeferDirectiveOnRootLevel.java new file mode 100644 index 0000000000..5b907bf29a --- /dev/null +++ b/src/main/java/graphql/validation/rules/DeferDirectiveOnRootLevel.java @@ -0,0 +1,59 @@ +package graphql.validation.rules; + +import graphql.Directives; +import graphql.ExperimentalApi; +import graphql.language.Directive; +import graphql.language.Node; +import graphql.language.OperationDefinition; +import graphql.schema.GraphQLCompositeType; +import graphql.schema.GraphQLObjectType; +import graphql.validation.AbstractRule; +import graphql.validation.ValidationContext; +import graphql.validation.ValidationErrorCollector; + +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import static graphql.validation.ValidationErrorType.MisplacedDirective; + +/** + * Defer and stream directives are used on valid root field + * + * A GraphQL document is only valid if defer directives are not used on root mutation or subscription types. 
+ * + * See proposed spec:spec/Section 5 -- Validation.md ### Defer And Stream Directives Are Used On Valid Root Field + */ +@ExperimentalApi +public class DeferDirectiveOnRootLevel extends AbstractRule { + private Set invalidOperations = new LinkedHashSet(Arrays.asList(OperationDefinition.Operation.MUTATION, OperationDefinition.Operation.SUBSCRIPTION)); + public DeferDirectiveOnRootLevel(ValidationContext validationContext, ValidationErrorCollector validationErrorCollector) { + super(validationContext, validationErrorCollector); + this.setVisitFragmentSpreads(true); + } + + @Override + public void checkDirective(Directive directive, List ancestors) { + // ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT must be true + if (!isExperimentalApiKeyEnabled(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT)) { + return; + } + + if (!Directives.DeferDirective.getName().equals(directive.getName())) { + return; + } + GraphQLObjectType mutationType = getValidationContext().getSchema().getMutationType(); + GraphQLObjectType subscriptionType = getValidationContext().getSchema().getSubscriptionType(); + GraphQLCompositeType parentType = getValidationContext().getParentType(); + if (mutationType != null && parentType != null && parentType.getName().equals(mutationType.getName())){ + String message = i18n(MisplacedDirective, "DeferDirective.notAllowedOperationRootLevelMutation", parentType.getName()); + addError(MisplacedDirective, directive.getSourceLocation(), message); + } else if (subscriptionType != null && parentType != null && parentType.getName().equals(subscriptionType.getName())) { + String message = i18n(MisplacedDirective, "DeferDirective.notAllowedOperationRootLevelSubscription", parentType.getName()); + addError(MisplacedDirective, directive.getSourceLocation(), message); + } + } + +} diff --git a/src/main/java/graphql/validation/rules/DeferDirectiveOnValidOperation.java b/src/main/java/graphql/validation/rules/DeferDirectiveOnValidOperation.java new file mode 100644 index 
0000000000..6baa9df948 --- /dev/null +++ b/src/main/java/graphql/validation/rules/DeferDirectiveOnValidOperation.java @@ -0,0 +1,84 @@ +package graphql.validation.rules; + +import graphql.Directives; +import graphql.ExperimentalApi; +import graphql.language.Argument; +import graphql.language.BooleanValue; +import graphql.language.Directive; +import graphql.language.Node; +import graphql.language.OperationDefinition; +import graphql.language.VariableReference; +import graphql.validation.AbstractRule; +import graphql.validation.ValidationContext; +import graphql.validation.ValidationErrorCollector; + +import java.util.List; +import java.util.Optional; + +import static graphql.language.OperationDefinition.Operation.SUBSCRIPTION; +import static graphql.validation.ValidationErrorType.MisplacedDirective; + +/** + * Defer Directive is Used On Valid Operations + * + * A GraphQL document is only valid if defer directives are not used on subscription types. + * + * See proposed spec:spec/Section 5 -- Validation.md ### Defer And Stream Directives Are Used On Valid Operations + * + */ +@ExperimentalApi +public class DeferDirectiveOnValidOperation extends AbstractRule { + public DeferDirectiveOnValidOperation(ValidationContext validationContext, ValidationErrorCollector validationErrorCollector) { + super(validationContext, validationErrorCollector); + this.setVisitFragmentSpreads(true); + } + + + @Override + public void checkDirective(Directive directive, List ancestors) { + // ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT must be true + if (!isExperimentalApiKeyEnabled(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT)) { + return; + } + + if (!Directives.DeferDirective.getName().equals(directive.getName())) { + return; + } + // check if the directive is on allowed operation + Optional operationDefinition = getOperationDefinition(ancestors); + if (operationDefinition.isPresent() && + SUBSCRIPTION.equals(operationDefinition.get().getOperation()) && + !ifArgumentMightBeFalse(directive) 
){ + String message = i18n(MisplacedDirective, "IncrementalDirective.notAllowedSubscriptionOperation", directive.getName()); + addError(MisplacedDirective, directive.getSourceLocation(), message); + } + } + + /** + * Extract from ancestors the OperationDefinition using the document ancestor. + * @param ancestors list of ancestors + * @return Optional of OperationDefinition + */ + private Optional getOperationDefinition(List ancestors) { + return ancestors.stream() + .filter(doc -> doc instanceof OperationDefinition) + .map((def -> (OperationDefinition) def)) + .findFirst(); + } + + private Boolean ifArgumentMightBeFalse(Directive directive) { + Argument ifArgument = directive.getArgumentsByName().get("if"); + if (ifArgument == null) { + return false; + } + if(ifArgument.getValue() instanceof BooleanValue){ + return !((BooleanValue) ifArgument.getValue()).isValue(); + } + if(ifArgument.getValue() instanceof VariableReference){ + return true; + } + return false; + } + +} + diff --git a/src/main/java/graphql/validation/rules/SubscriptionUniqueRootField.java b/src/main/java/graphql/validation/rules/SubscriptionUniqueRootField.java index bd73db3dd5..0ded9ca632 100644 --- a/src/main/java/graphql/validation/rules/SubscriptionUniqueRootField.java +++ b/src/main/java/graphql/validation/rules/SubscriptionUniqueRootField.java @@ -1,11 +1,15 @@ package graphql.validation.rules; import graphql.Internal; -import graphql.language.Field; -import graphql.language.FragmentDefinition; -import graphql.language.FragmentSpread; +import graphql.execution.CoercedVariables; +import graphql.execution.FieldCollector; +import graphql.execution.FieldCollectorParameters; +import graphql.execution.MergedField; +import graphql.execution.MergedSelectionSet; +import graphql.language.NodeUtil; import graphql.language.OperationDefinition; import graphql.language.Selection; +import graphql.schema.GraphQLObjectType; import graphql.validation.AbstractRule; import graphql.validation.ValidationContext; 
import graphql.validation.ValidationErrorCollector; @@ -24,6 +28,7 @@ */ @Internal public class SubscriptionUniqueRootField extends AbstractRule { + private final FieldCollector fieldCollector = new FieldCollector(); public SubscriptionUniqueRootField(ValidationContext validationContext, ValidationErrorCollector validationErrorCollector) { super(validationContext, validationErrorCollector); } @@ -31,36 +36,37 @@ public SubscriptionUniqueRootField(ValidationContext validationContext, Validati @Override public void checkOperationDefinition(OperationDefinition operationDef) { if (operationDef.getOperation() == SUBSCRIPTION) { + + GraphQLObjectType subscriptionType = getValidationContext().getSchema().getSubscriptionType(); + + FieldCollectorParameters collectorParameters = FieldCollectorParameters.newParameters() + .schema(getValidationContext().getSchema()) + .fragments(NodeUtil.getFragmentsByName(getValidationContext().getDocument())) + .variables(CoercedVariables.emptyVariables().toMap()) + .objectType(subscriptionType) + .graphQLContext(getValidationContext().getGraphQLContext()) + .build(); + + MergedSelectionSet fields = fieldCollector.collectFields(collectorParameters, operationDef.getSelectionSet()); List subscriptionSelections = operationDef.getSelectionSet().getSelections(); - if (subscriptionSelections.size() > 1) { + if (fields.size() > 1) { String message = i18n(SubscriptionMultipleRootFields, "SubscriptionUniqueRootField.multipleRootFields", operationDef.getName()); addError(SubscriptionMultipleRootFields, operationDef.getSourceLocation(), message); } else { // Only one item in selection set, size == 1 - Selection rootSelection = subscriptionSelections.get(0); - if (isIntrospectionField(rootSelection)) { - String message = i18n(SubscriptionIntrospectionRootField, "SubscriptionIntrospectionRootField.introspectionRootField", operationDef.getName(), ((Field) rootSelection).getName()); - addError(SubscriptionIntrospectionRootField, 
rootSelection.getSourceLocation(), message); - } else if (rootSelection instanceof FragmentSpread) { - // If the only item in selection set is a fragment, inspect the fragment. - String fragmentName = ((FragmentSpread) rootSelection).getName(); - FragmentDefinition fragmentDef = getValidationContext().getFragment(fragmentName); - List fragmentSelections = fragmentDef.getSelectionSet().getSelections(); + MergedField mergedField = fields.getSubFieldsList().get(0); + - if (fragmentSelections.size() > 1) { - String message = i18n(SubscriptionMultipleRootFields, "SubscriptionUniqueRootField.multipleRootFieldsWithFragment", operationDef.getName()); - addError(SubscriptionMultipleRootFields, rootSelection.getSourceLocation(), message); - } else if (isIntrospectionField(fragmentSelections.get(0))) { - String message = i18n(SubscriptionIntrospectionRootField, "SubscriptionIntrospectionRootField.introspectionRootFieldWithFragment", operationDef.getName(), ((Field) fragmentSelections.get(0)).getName()); - addError(SubscriptionIntrospectionRootField, rootSelection.getSourceLocation(), message); - } + if (isIntrospectionField(mergedField)) { + String message = i18n(SubscriptionIntrospectionRootField, "SubscriptionIntrospectionRootField.introspectionRootField", operationDef.getName(), mergedField.getName()); + addError(SubscriptionIntrospectionRootField, mergedField.getSingleField().getSourceLocation(), message); } } } } - private boolean isIntrospectionField(Selection selection) { - return selection instanceof Field && ((Field) selection).getName().startsWith("__"); + private boolean isIntrospectionField(MergedField field) { + return field.getName().startsWith("__"); } } diff --git a/src/main/resources/i18n/Execution_de.properties b/src/main/resources/i18n/Execution_de.properties new file mode 100644 index 0000000000..dd194540c7 --- /dev/null +++ b/src/main/resources/i18n/Execution_de.properties @@ -0,0 +1,8 @@ +# +# This resource bundle is used for the query execution code to 
produce i18n messages +# +# REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them +# so use 2 '' characters to make it one ' on output. This will take for the form ''{0}'' +# +Execution.handleOneOfNotOneFieldError=Es muss genau ein Key angegeben werden für OneOf Typ ''{0}''. +Execution.handleOneOfValueIsNullError=OneOf type field ''{0}'' darf nicht null sein. diff --git a/src/main/resources/i18n/Execution_nl.properties b/src/main/resources/i18n/Execution_nl.properties new file mode 100644 index 0000000000..eee9f69ea0 --- /dev/null +++ b/src/main/resources/i18n/Execution_nl.properties @@ -0,0 +1,8 @@ +# +# This resource bundle is used for the query execution code to produce i18n messages +# +# REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them +# so use 2 '' characters to make it one ' on output. This will take for the form ''{0}'' +# +Execution.handleOneOfNotOneFieldError=Er moet exact één sleutel aangegeven worden voor OneOf type ''{0}''. +Execution.handleOneOfValueIsNullError=OneOf type field ''{0}'' mag niet null zijn. diff --git a/src/main/resources/i18n/Parsing_de.properties b/src/main/resources/i18n/Parsing_de.properties index 919e5fee9c..2d3c43f777 100644 --- a/src/main/resources/i18n/Parsing_de.properties +++ b/src/main/resources/i18n/Parsing_de.properties @@ -10,17 +10,19 @@ # REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them # so use 2 '' characters to make it one ' on output. 
This will take for the form ''{0}'' # -InvalidSyntax.noMessage=Ungültige Syntax in Zeile {0} Spalte {1} -InvalidSyntax.full=Ungültige Syntax, ANTLR-Fehler ''{0}'' in Zeile {1} Spalte {2} +InvalidSyntax.noMessage=Ungültige Syntax in Zeile {0} Spalte {1} +InvalidSyntax.full=Ungültige Syntax, ANTLR-Fehler ''{0}'' in Zeile {1} Spalte {2} -InvalidSyntaxBail.noToken=Ungültige Syntax in Zeile {0} Spalte {1} -InvalidSyntaxBail.full=Ungültige Syntax wegen des ungültigen Tokens ''{0}'' in Zeile {1} Spalte {2} +InvalidSyntaxBail.noToken=Ungültige Syntax in Zeile {0} Spalte {1} +InvalidSyntaxBail.full=Ungültige Syntax wegen des ungültigen Tokens ''{0}'' in Zeile {1} Spalte {2} # -InvalidSyntaxMoreTokens.full=Es wurde eine ungültige Syntax festgestellt. Es gibt zusätzliche Token im Text, die nicht konsumiert wurden. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidSyntaxMoreTokens.full=Es wurde eine ungültige Syntax festgestellt. Es gibt zusätzliche Token im Text, die nicht konsumiert wurden. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} # -ParseCancelled.full=Es wurden mehr als {0} ''{1}'' Token präsentiert. Um Denial-of-Service-Angriffe zu verhindern, wurde das Parsing abgebrochen +ParseCancelled.full=Es wurden mehr als {0} ''{1}'' Token präsentiert. Um Denial-of-Service-Angriffe zu verhindern, wurde das Parsing abgebrochen. +ParseCancelled.tooDeep=Es wurden mehr als {0} tief ''{1}'' Regeln ausgeführt. Um Denial-of-Service-Angriffe zu verhindern, wurde das Parsing abgebrochen. +ParseCancelled.tooManyChars=Es wurden mehr als {0} Zeichen vorgelegt. Um Denial-of-Service-Angriffe zu verhindern, wurde das Parsing abgebrochen. # -InvalidUnicode.trailingLeadingSurrogate=Ungültiger Unicode gefunden. Trailing surrogate muss ein leading surrogate vorangestellt werden. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} -InvalidUnicode.leadingTrailingSurrogate=Ungültiger Unicode gefunden. Auf ein leading surrogate muss ein trailing surrogate folgen. 
Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} -InvalidUnicode.invalidCodePoint=Ungültiger Unicode gefunden. Kein gültiger code point. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} -InvalidUnicode.incorrectEscape=Ungültiger Unicode gefunden. Falsch formatierte Escape-Sequenz. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidUnicode.trailingLeadingSurrogate=Ungültiger Unicode gefunden. Trailing surrogate muss ein leading surrogate vorangestellt werden. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidUnicode.leadingTrailingSurrogate=Ungültiger Unicode gefunden. Auf ein leading surrogate muss ein trailing surrogate folgen. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidUnicode.invalidCodePoint=Ungültiger Unicode gefunden. Kein gültiger code point. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidUnicode.incorrectEscape=Ungültiger Unicode gefunden. Falsch formatierte Escape-Sequenz. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} diff --git a/src/main/resources/i18n/Parsing_nl.properties b/src/main/resources/i18n/Parsing_nl.properties new file mode 100644 index 0000000000..dfa099a91a --- /dev/null +++ b/src/main/resources/i18n/Parsing_nl.properties @@ -0,0 +1,27 @@ +# +# This resource bundle is used for the query parsing code to produce i18n messages +# +# The keys have the format of rule class name and then message type within that. Most rules +# will only have 1 or 2 message keys +# +# Please try and keep this sorted within rule class and use # between sections so the IDEA Ctrl-Alt-L reformat does not bunch +# them too tightly. +# +# REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them +# so use 2 '' characters to make it one ' on output. 
This will take for the form ''{0}'' +# +InvalidSyntax.noMessage=Ongeldige syntaxis op lijn {0} kolom {1} +InvalidSyntax.full=Ongeldige syntaxis, ANTLR foutmelding ''{0}'' op lijn {1} kolom {2} +InvalidSyntaxBail.noToken=Ongeldige syntaxis op lijn {0} kolom {1} +InvalidSyntaxBail.full=Ongeldige syntaxis wegens ongeldige token ''{0}'' op lijn {1} kolom {2} +# +InvalidSyntaxMoreTokens.full=Ongeldige syntaxis tegengekomen. Er zijn tokens in de tekst die niet zijn verwerkt. Ongeldige token ''{0}'' op lijn {1} kolom {2} +# +ParseCancelled.full=Meer dan {0} ''{1}'' tokens zijn gepresenteerd. Om een DDoS-aanval te voorkomen is het parsen gestopt. +ParseCancelled.tooDeep=Meer dan {0} diep, ''{1}'' regels zijn uitgevoerd. Om een DDoS-aanval te voorkomen is het parsen gestopt. +ParseCancelled.tooManyChars=Meer dan {0} tekens zijn voorgelegd. Om een DDoS-aanval te voorkomen is het parsen gestopt. +# +InvalidUnicode.trailingLeadingSurrogate=Ongeldige Unicode tegengekomen. Trailing surrogate moet vooropgaan aan een leading surrogate. Ongeldige token ''{0}'' op lijn {1} kolom {2} +InvalidUnicode.leadingTrailingSurrogate=Ongeldige Unicode tegengekomen. Leading surrogate moet voorafgaan aan een trailing surrogate. Ongeldige token ''{0}'' op lijn {1} kolom {2} +InvalidUnicode.invalidCodePoint=Ongeldige Unicode tegengekomen. Ongeldig codepunt. Ongeldige token ''{0}'' op lijn {1} kolom {2} +InvalidUnicode.incorrectEscape=Ongeldige Unicode tegengekomen. Ongeldige geformatteerde escape-sequentie. 
Ongeldige token ''{0}'' op lijn {1} kolom {2} diff --git a/src/main/resources/i18n/Scalars.properties b/src/main/resources/i18n/Scalars.properties index 0897fe58eb..39fc6b4105 100644 --- a/src/main/resources/i18n/Scalars.properties +++ b/src/main/resources/i18n/Scalars.properties @@ -24,6 +24,10 @@ ID.unexpectedAstType=Expected an AST type of ''IntValue'' or ''StringValue'' but # Float.notFloat=Expected a value that can be converted to type ''Float'' but it was a ''{0}'' Float.unexpectedAstType=Expected an AST type of ''IntValue'' or ''FloatValue'' but it was a ''{0}'' +Float.unexpectedRawValueType=Expected a Number input, but it was a ''{0}'' # Boolean.notBoolean=Expected a value that can be converted to type ''Boolean'' but it was a ''{0}'' Boolean.unexpectedAstType=Expected an AST type of ''BooleanValue'' but it was a ''{0}'' +Boolean.unexpectedRawValueType=Expected a Boolean input, but it was a ''{0}'' +# +String.unexpectedRawValueType=Expected a String input, but it was a ''{0}'' diff --git a/src/main/resources/i18n/Scalars_de.properties b/src/main/resources/i18n/Scalars_de.properties index 645ed5e7ed..242046369b 100644 --- a/src/main/resources/i18n/Scalars_de.properties +++ b/src/main/resources/i18n/Scalars_de.properties @@ -12,9 +12,9 @@ # Scalar.unexpectedAstType=Erwartet wurde ein AST type von ''{0}'', aber es war ein ''{1}'' # -Enum.badInput=Ungültige Eingabe für enum ''{0}''. Unbekannter Wert ''{1}'' -Enum.badName=Ungültige Eingabe für enum ''{0}''. Kein Wert für den Namen ''{1}'' gefunden -Enum.unallowableValue=Literal nicht in den zulässigen Werten für enum ''{0}'' - ''{1}'' +Enum.badInput=Ungültige Eingabe für enum ''{0}''. Unbekannter Wert ''{1}'' +Enum.badName=Ungültige Eingabe für enum ''{0}''. 
Kein Wert für den Namen ''{1}'' gefunden +Enum.unallowableValue=Literal nicht in den zulässigen Werten für enum ''{0}'' - ''{1}'' # Int.notInt=Erwartet wurde ein Wert, der in den Typ ''Int'' konvertiert werden kann, aber es war ein ''{0}'' Int.outsideRange=Erwarteter Wert im Integer-Bereich, aber es war ein ''{0}'' @@ -24,6 +24,10 @@ ID.unexpectedAstType=Erwartet wurde ein AST type von ''IntValue'' oder ''StringV # Float.notFloat=Erwartet wurde ein Wert, der in den Typ ''Float'' konvertiert werden kann, aber es war ein ''{0}'' Float.unexpectedAstType=Erwartet wurde ein AST type von ''IntValue'' oder ''FloatValue'', aber es war ein ''{0}'' +Float.unexpectedRawValueType=Erwartet wurde eine Number-Eingabe, aber es war ein ''{0}'' # Boolean.notBoolean=Erwartet wurde ein Wert, der in den Typ ''Boolean'' konvertiert werden kann, aber es war ein ''{0}'' Boolean.unexpectedAstType=Erwartet wurde ein AST type ''BooleanValue'', aber es war ein ''{0}'' +Boolean.unexpectedRawValueType=Erwartet wurde eine Boolean-Eingabe, aber es war ein ''{0}'' +# +String.unexpectedRawValueType=Erwartet wurde eine String-Eingabe, aber es war ein ''{0}'' diff --git a/src/main/resources/i18n/Scalars_nl.properties b/src/main/resources/i18n/Scalars_nl.properties new file mode 100644 index 0000000000..9878c1716f --- /dev/null +++ b/src/main/resources/i18n/Scalars_nl.properties @@ -0,0 +1,36 @@ +# +# This resource bundle is used for the scalar code to produce i18n messages +# +# The keys have the format of rule class name and then message type within that. Most rules +# will only have 1 or 2 message keys +# +# Please try and keep this sorted within rule class and use # between sections so the IDEA Ctrl-Alt-L reformat does not bunch +# them too tightly. +# +# REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them +# so use 2 '' characters to make it one ' on output. 
This will take for the form ''{0}'' +# +Scalar.unexpectedAstType=Verwacht werd een AST-type ''{0}'', maar het was een ''{1}'' +# +Enum.badInput=Ongeldige invoer voor enum ''{0}''. Onbekende waarde ''{1}'' +Enum.badName=Ongeldige invoer voor enum ''{0}''. Geen waarde gevonden voor naam ''{1}'' +Enum.unallowableValue=Literal is niet een toegestane waarde voor enum ''{0}'' - ''{1}'' +# +Int.notInt=Verwacht werd een waarde die in ''Int'' veranderd kon worden, maar het was een ''{0}'' +Int.outsideRange=Verwacht werd een waarde die binnen het integerbereik valt, maar het was een ''{0}'' +# +ID.notId=Verwacht werd een waarde die in ''ID'' veranderd kon worden, maar het was een ''{0}'' +ID.unexpectedAstType=Verwacht werd een AST-type ''IntValue'' of ''StringValue'', maar het was een ''{0}'' +# +Float.notFloat=Verwacht werd een waarde die in ''Float'' veranderd kon worden, maar het was een ''{0}'' +Float.unexpectedAstType=Verwacht werd een AST-type ''IntValue'' of ''FloatValue'', maar het was een ''{0}'' +# TODO: To be translated into Dutch +Float.unexpectedRawValueType=Expected a Number input, but it was a ''{0}'' +# +Boolean.notBoolean=Verwacht werd een waarde die in ''Boolean'' veranderd kon worden, maar het was een ''{0}'' +Boolean.unexpectedAstType=Verwacht werd een AST-type ''BooleanValue'', maar het was een ''{0}'' +# TODO: To be translated into Dutch +Boolean.unexpectedRawValueType=Expected a Boolean input, but it was a ''{0}'' +# +# TODO: To be translated into Dutch +String.unexpectedRawValueType=Expected a String input, but it was a ''{0}'' diff --git a/src/main/resources/i18n/Validation.properties b/src/main/resources/i18n/Validation.properties index 82c754db3e..c12920da07 100644 --- a/src/main/resources/i18n/Validation.properties +++ b/src/main/resources/i18n/Validation.properties @@ -10,6 +10,14 @@ # REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them # so use 2 '' characters to make it one ' on output. 
This will take for the form ''{0}'' # + +DeferDirective.notAllowedOperationRootLevelMutation=Validation error ({0}) : Defer directive cannot be used on root mutation type ''{1}'' +DeferDirective.notAllowedOperationRootLevelSubscription=Validation error ({0}) : Defer directive cannot be used on root subscription type ''{1}'' +DeferDirective.labelMustBeStaticString=Validation error ({0}) : Defer directive''s label argument must be a static string +IncrementalDirective.notAllowedSubscriptionOperation=Validation error ({0}) : Directive ''{1}'' is not allowed to be used on operation subscription + +IncrementalDirective.uniqueArgument=Validation error ({0}) : There can be only one argument named ''{1}'' for directive defer/Stream +# ExecutableDefinitions.notExecutableType=Validation error ({0}) : Type ''{1}'' definition is not executable ExecutableDefinitions.notExecutableSchema=Validation error ({0}) : Schema definition is not executable ExecutableDefinitions.notExecutableDirective=Validation error ({0}) : Directive ''{1}'' definition is not executable @@ -98,3 +106,4 @@ ArgumentValidationUtil.handleNotObjectError=Validation error ({0}) : argument '' ArgumentValidationUtil.handleMissingFieldsError=Validation error ({0}) : argument ''{1}'' with value ''{2}'' is missing required fields ''{3}'' # suppress inspection "UnusedProperty" ArgumentValidationUtil.handleExtraFieldError=Validation error ({0}) : argument ''{1}'' with value ''{2}'' contains a field not in ''{3}'': ''{4}'' + diff --git a/src/main/resources/i18n/Validation_de.properties b/src/main/resources/i18n/Validation_de.properties index c15e3bb550..fec637643c 100644 --- a/src/main/resources/i18n/Validation_de.properties +++ b/src/main/resources/i18n/Validation_de.properties @@ -10,15 +10,15 @@ # REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them # so use 2 '' characters to make it one ' on output. 
This will take for the form ''{0}'' # -ExecutableDefinitions.notExecutableType=Validierungsfehler ({0}) : Type definition ''{1}'' ist nicht ausführbar -ExecutableDefinitions.notExecutableSchema=Validierungsfehler ({0}) : Schema definition ist nicht ausführbar -ExecutableDefinitions.notExecutableDirective=Validierungsfehler ({0}) : Directive definition ''{1}'' ist nicht ausführbar -ExecutableDefinitions.notExecutableDefinition=Validierungsfehler ({0}) : Die angegebene Definition ist nicht ausführbar +ExecutableDefinitions.notExecutableType=Validierungsfehler ({0}) : Type definition ''{1}'' ist nicht ausführbar +ExecutableDefinitions.notExecutableSchema=Validierungsfehler ({0}) : Schema definition ist nicht ausführbar +ExecutableDefinitions.notExecutableDirective=Validierungsfehler ({0}) : Directive definition ''{1}'' ist nicht ausführbar +ExecutableDefinitions.notExecutableDefinition=Validierungsfehler ({0}) : Die angegebene Definition ist nicht ausführbar # FieldsOnCorrectType.unknownField=Validierungsfehler ({0}) : Feld ''{1}'' vom Typ ''{2}'' ist nicht definiert # -FragmentsOnCompositeType.invalidInlineTypeCondition=Validierungsfehler ({0}) : Inline fragment type condition ist ungültig, muss auf Object/Interface/Union stehen -FragmentsOnCompositeType.invalidFragmentTypeCondition=Validierungsfehler ({0}) : Fragment type condition ist ungültig, muss auf Object/Interface/Union stehen +FragmentsOnCompositeType.invalidInlineTypeCondition=Validierungsfehler ({0}) : Inline fragment type condition ist ungültig, muss auf Object/Interface/Union stehen +FragmentsOnCompositeType.invalidFragmentTypeCondition=Validierungsfehler ({0}) : Fragment type condition ist ungültig, muss auf Object/Interface/Union stehen # KnownArgumentNames.unknownDirectiveArg=Validierungsfehler ({0}) : Unbekanntes directive argument ''{1}'' KnownArgumentNames.unknownFieldArg=Validierungsfehler ({0}) : Unbekanntes field argument ''{1}'' @@ -45,17 +45,17 @@ 
OverlappingFieldsCanBeMerged.differentFields=Validierungsfehler ({0}) : ''{1}'' OverlappingFieldsCanBeMerged.differentArgs=Validierungsfehler ({0}) : ''{1}'' : Felder haben unterschiedliche Argumente OverlappingFieldsCanBeMerged.differentNullability=Validierungsfehler ({0}) : ''{1}'' : Felder haben unterschiedliche nullability shapes OverlappingFieldsCanBeMerged.differentLists=Validierungsfehler ({0}) : ''{1}'' : Felder haben unterschiedliche list shapes -OverlappingFieldsCanBeMerged.differentReturnTypes=Validierungsfehler ({0}) : ''{1}'' : gibt verschiedene Typen ''{2}'' und ''{3}'' zurück +OverlappingFieldsCanBeMerged.differentReturnTypes=Validierungsfehler ({0}) : ''{1}'' : gibt verschiedene Typen ''{2}'' und ''{3}'' zurück # -PossibleFragmentSpreads.inlineIncompatibleTypes=Validierungsfehler ({0}) : Fragment kann hier nicht verbreitet werden, da object vom Typ ''{1}'' niemals vom Typ ''{2}'' sein können -PossibleFragmentSpreads.fragmentIncompatibleTypes=Validierungsfehler ({0}) : Fragment ''{1}'' kann hier nicht verbreitet werden, da object vom Typ ''{2}'' niemals vom Typ ''{3}'' sein können +PossibleFragmentSpreads.inlineIncompatibleTypes=Validierungsfehler ({0}) : Fragment kann hier nicht verbreitet werden, da object vom Typ ''{1}'' niemals vom Typ ''{2}'' sein können +PossibleFragmentSpreads.fragmentIncompatibleTypes=Validierungsfehler ({0}) : Fragment ''{1}'' kann hier nicht verbreitet werden, da object vom Typ ''{2}'' niemals vom Typ ''{3}'' sein können # ProvidedNonNullArguments.missingFieldArg=Validierungsfehler ({0}) : Fehlendes field argument ''{1}'' ProvidedNonNullArguments.missingDirectiveArg=Validierungsfehler ({0}) : Fehlendes directive argument ''{1}'' -ProvidedNonNullArguments.nullValue=Validierungsfehler ({0}) : Nullwert für non-null field argument ''{1}'' +ProvidedNonNullArguments.nullValue=Validierungsfehler ({0}) : Nullwert für non-null field argument ''{1}'' # -ScalarLeaves.subselectionOnLeaf=Validierungsfehler ({0}) : Unterauswahl für 
Blatttyp ''{1}'' von Feld ''{2}'' nicht zulässig -ScalarLeaves.subselectionRequired=Validierungsfehler ({0}) : Unterauswahl erforderlich für Typ ''{1}'' des Feldes ''{2}'' +ScalarLeaves.subselectionOnLeaf=Validierungsfehler ({0}) : Unterauswahl für Blatttyp ''{1}'' von Feld ''{2}'' nicht zulässig +ScalarLeaves.subselectionRequired=Validierungsfehler ({0}) : Unterauswahl erforderlich für Typ ''{1}'' des Feldes ''{2}'' # SubscriptionUniqueRootField.multipleRootFields=Validierungsfehler ({0}) : Subscription operation ''{1}'' muss genau ein root field haben SubscriptionUniqueRootField.multipleRootFieldsWithFragment=Validierungsfehler ({0}) : Subscription operation ''{1}'' muss genau ein root field mit Fragmenten haben @@ -64,7 +64,7 @@ SubscriptionIntrospectionRootField.introspectionRootFieldWithFragment=Validierun # UniqueArgumentNames.uniqueArgument=Validierungsfehler ({0}) : Es kann nur ein Argument namens ''{1}'' geben # -UniqueDirectiveNamesPerLocation.uniqueDirectives=Validierungsfehler ({0}) : Nicht wiederholbare directive müssen innerhalb einer Lokation eindeutig benannt werden. Directive ''{1}'', die auf einem ''{2}'' verwendet wird, ist nicht eindeutig +UniqueDirectiveNamesPerLocation.uniqueDirectives=Validierungsfehler ({0}) : Nicht wiederholbare directive müssen innerhalb einer Lokation eindeutig benannt werden. 
Directive ''{1}'', die auf einem ''{2}'' verwendet wird, ist nicht eindeutig # UniqueFragmentNames.oneFragment=Validierungsfehler ({0}) : Es kann nur ein Fragment namens ''{1}'' geben # @@ -72,28 +72,29 @@ UniqueOperationNames.oneOperation=Validierungsfehler ({0}) : Es kann nur eine Op # UniqueVariableNames.oneVariable=Validierungsfehler ({0}) : Es kann nur eine Variable namens ''{1}'' geben # -VariableDefaultValuesOfCorrectType.badDefault=Validierungsfehler ({0}) : Ungültiger Standardwert ''{1}'' für Typ ''{2}'' +VariableDefaultValuesOfCorrectType.badDefault=Validierungsfehler ({0}) : Ungültiger Standardwert ''{1}'' für Typ ''{2}'' # VariablesAreInputTypes.wrongType=Validierungsfehler ({0}) : Eingabevariable ''{1}'' Typ ''{2}'' ist kein Eingabetyp # VariableTypesMatchRule.unexpectedType=Validierungsfehler ({0}) : Variable ''{1}'' vom Typ ''{2}'' verwendet in Position, die Typ ''{3}'' erwartet +UniqueObjectFieldName.duplicateFieldName=Validierungsfehler ({0}) : Es kann nur ein Feld mit Name ''{1}'' geben # # These are used but IDEA cant find them easily as being called # # suppress inspection "UnusedProperty" ArgumentValidationUtil.handleNullError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' darf nicht null sein # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleScalarError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' +ArgumentValidationUtil.handleScalarError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleScalarErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' - {4} +ArgumentValidationUtil.handleScalarErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' - {4} # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleEnumError=Validierungsfehler ({0}) : Argument 
''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' +ArgumentValidationUtil.handleEnumError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleEnumErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' - {4} +ArgumentValidationUtil.handleEnumErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' - {4} # suppress inspection "UnusedProperty" ArgumentValidationUtil.handleNotObjectError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' muss ein object type sein # suppress inspection "UnusedProperty" ArgumentValidationUtil.handleMissingFieldsError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' fehlen Pflichtfelder ''{3}'' # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleExtraFieldError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' enthält ein Feld nicht in ''{3}'': ''{4}'' +ArgumentValidationUtil.handleExtraFieldError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' enthält ein Feld nicht in ''{3}'': ''{4}'' # diff --git a/src/main/resources/i18n/Validation_nl.properties b/src/main/resources/i18n/Validation_nl.properties new file mode 100644 index 0000000000..e30b342640 --- /dev/null +++ b/src/main/resources/i18n/Validation_nl.properties @@ -0,0 +1,100 @@ +# +# This resource bundle is used for the query validation code to produce i18n messages +# +# The keys have the format of rule class name and then message type within that. Most rules +# will only have 1 or 2 message keys +# +# Please try and keep this sorted within rule class and use # between sections so the IDEA Ctrl-Alt-L reformat does not bunch +# them too tightly. +# +# REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them +# so use 2 '' characters to make it one ' on output. 
This will take for the form ''{0}'' +# +ExecutableDefinitions.notExecutableType=Validatiefout ({0}) : Type definitie ''{1}'' is niet uitvoerbaar +ExecutableDefinitions.notExecutableSchema=Validatiefout ({0}) : Schema definitie is niet uitvoerbaar +ExecutableDefinitions.notExecutableDirective=Validatiefout ({0}) : Directive definitie ''{1}'' is niet uitvoerbaar +ExecutableDefinitions.notExecutableDefinition=Validatiefout ({0}) : Aangeleverde definition is niet uitvoerbaar +# +FieldsOnCorrectType.unknownField=Validatiefout ({0}) : Veld ''{1}'' in type ''{2}'' is ongedefinieerd +# +FragmentsOnCompositeType.invalidInlineTypeCondition=Validatiefout ({0}) : Inline fragment type condition is ongeldig, moet op een Object/Interface/Union zitten +FragmentsOnCompositeType.invalidFragmentTypeCondition=Validatiefout ({0}) : Fragment type condition is ongeldig, moet op een Object/Interface/Union zitten +# +KnownArgumentNames.unknownDirectiveArg=Validatiefout ({0}) : Onbekende directive argument ''{1}'' +KnownArgumentNames.unknownFieldArg=Validatiefout ({0}) : Onbekende field argument ''{1}'' +# +KnownDirectives.unknownDirective=Validatiefout ({0}) : Onbekende directive ''{1}'' +KnownDirectives.directiveNotAllowed=Validatiefout ({0}) : Directive ''{1}'' is hier niet toegestaan +# +KnownFragmentNames.undefinedFragment=Validatiefout ({0}) : Ongedefinieerd fragment ''{1}'' +# +KnownTypeNames.unknownType=Validatiefout ({0}) : Ongeldig type ''{1}'' +# +LoneAnonymousOperation.withOthers=Validatiefout ({0}) : Anonieme operation met andere operations +LoneAnonymousOperation.namedOperation=Validatiefout ({0}) : Operation ''{1}'' volgt een anonieme operatie +# +NoFragmentCycles.cyclesNotAllowed=Validatiefout ({0}) : Fragment cycles niet toegestaan +# +NoUndefinedVariables.undefinedVariable=Validatiefout ({0}) : Ongedefinieerde variabele ''{1}'' +# +NoUnusedFragments.unusedFragments=Validatiefout ({0}) : Ongebruikt fragment ''{1}'' +# +NoUnusedVariables.unusedVariable=Validatiefout ({0}) : 
Ongebruikte variabele ''{1}'' +# +OverlappingFieldsCanBeMerged.differentFields=Validatiefout ({0}) : ''{1}'' : ''{2}'' en ''{3}'' zijn verschillende velden +OverlappingFieldsCanBeMerged.differentArgs=Validatiefout ({0}) : ''{1}'' : velden hebben verschillende argumenten +OverlappingFieldsCanBeMerged.differentNullability=Validatiefout ({0}) : ''{1}'' : velden hebben verschillende nullability shapes +OverlappingFieldsCanBeMerged.differentLists=Validatiefout ({0}) : ''{1}'' : velden hebben verschillende vormen +OverlappingFieldsCanBeMerged.differentReturnTypes=Validatiefout ({0}) : ''{1}'' : retourneert verschillende types ''{2}'' en ''{3}'' +# +PossibleFragmentSpreads.inlineIncompatibleTypes=Validatiefout ({0}) : Fragment kan hier niet uitgespreid worden omdat een object van type ''{1}'' nooit van het type ''{2}'' kan zijn +PossibleFragmentSpreads.fragmentIncompatibleTypes=Validatiefout ({0}) : Fragment ''{1}'' kan hier niet uitgespreid worden omdat een object van type ''{2}'' nooit van het type ''{3}'' kan zijn +# +ProvidedNonNullArguments.missingFieldArg=Validatiefout ({0}) : Missend field argument ''{1}'' +ProvidedNonNullArguments.missingDirectiveArg=Validatiefout ({0}) : Missend directive argument ''{1}'' +ProvidedNonNullArguments.nullValue=Validatiefout ({0}) : Null waarde voor non-null field argument ''{1}'' +# +ScalarLeaves.subselectionOnLeaf=Validatiefout ({0}) : Sub-selectie niet toegestaan op leaf/uiteinde type ''{1}'' van veld ''{2}'' +ScalarLeaves.subselectionRequired=Validatiefout ({0}) : Sub-selectie verplicht voor type ''{1}'' van veld ''{2}'' +# +SubscriptionUniqueRootField.multipleRootFields=Validatiefout ({0}) : Subscription operation ''{1}'' moet exact één root field hebben +SubscriptionUniqueRootField.multipleRootFieldsWithFragment=Validatiefout ({0}) : Subscription operation ''{1}'' moet exact één root field met fragmenten hebben +SubscriptionIntrospectionRootField.introspectionRootField=Validatiefout ({0}) : Subscription operation ''{1}'' root 
field ''{2}'' kan geen introspectieveld zijn +SubscriptionIntrospectionRootField.introspectionRootFieldWithFragment=Validatiefout ({0}) : Subscription operation ''{1}'' fragment root field ''{2}'' kan geen introspectieveld zijn +# +UniqueArgumentNames.uniqueArgument=Validatiefout ({0}) : Er mag maar één argument met naam ''{1}'' bestaan +# +UniqueDirectiveNamesPerLocation.uniqueDirectives=Validatiefout ({0}) : Onherhaalbare directives moeten een unieke naam hebben binnen een locatie. De directive ''{1}'' gebruikt op een ''{2}'' is niet uniek +# +UniqueFragmentNames.oneFragment=Validatiefout ({0}) : Er mag maar één fragment met naam ''{1}'' bestaan +# +UniqueOperationNames.oneOperation=Validatiefout ({0}) : Er mag maar één operatie met naam ''{1}'' bestaan +# +UniqueVariableNames.oneVariable=Validatiefout ({0}) : Er mag maar één variabele met naam ''{1}'' bestaan +# +VariableDefaultValuesOfCorrectType.badDefault=Validatiefout ({0}) : Ongeldige standaardwaarde ''{1}'' voor type ''{2}'' +# +VariablesAreInputTypes.wrongType=Validatiefout ({0}) : Invoervariabele ''{1}'' type ''{2}'' is geen invoertype +# +VariableTypesMatchRule.unexpectedType=Validatiefout ({0}) : Variabele type ''{1}'' komt niet overeen met het verwachte type ''{2}'' +UniqueObjectFieldName.duplicateFieldName=Validatiefout ({0}) : Er kan slechts één veld genaamd ''{1}'' zijn +# +# These are used but IDEA cant find them easily as being called +# +# suppress inspection "UnusedProperty" +ArgumentValidationUtil.handleNullError=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' mag niet null zijn +# suppress inspection "UnusedProperty" +ArgumentValidationUtil.handleScalarError=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' is geen geldige ''{3}'' +# suppress inspection "UnusedProperty" +ArgumentValidationUtil.handleScalarErrorCustomMessage=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' is geen geldige ''{3}'' - {4} +# suppress inspection "UnusedProperty" 
+ArgumentValidationUtil.handleEnumError=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' is geen geldige ''{3}'' +# suppress inspection "UnusedProperty" +ArgumentValidationUtil.handleEnumErrorCustomMessage=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' is geen geldige ''{3}'' - {4} +# suppress inspection "UnusedProperty" +ArgumentValidationUtil.handleNotObjectError=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' moet een object type zijn +# suppress inspection "UnusedProperty" +ArgumentValidationUtil.handleMissingFieldsError=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' mist een verplicht veld ''{3}'' +# suppress inspection "UnusedProperty" +ArgumentValidationUtil.handleExtraFieldError=Validatiefout ({0}) : argument ''{1}'' met waarde ''{2}'' bevat een veld niet in ''{3}'': ''{4}'' +# diff --git a/src/test/groovy/example/http/ExecutionResultJSONTesting.java b/src/test/groovy/example/http/ExecutionResultJSONTesting.java index 965ac68e4b..019eb17616 100644 --- a/src/test/groovy/example/http/ExecutionResultJSONTesting.java +++ b/src/test/groovy/example/http/ExecutionResultJSONTesting.java @@ -70,7 +70,11 @@ private void testGson(HttpServletResponse response, Object er) throws IOExceptio private ExecutionResult createER() { List errors = new ArrayList<>(); - errors.add(new ValidationError(ValidationErrorType.UnknownType, mkLocations(), "Test ValidationError")); // Retain as there is no alternative constructor for ValidationError + errors.add(ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.UnknownType) + .sourceLocations(mkLocations()) + .description("Test ValidationError") + .build()); errors.add(new MissingRootTypeException("Mutations are not supported.", null)); errors.add(new InvalidSyntaxError(mkLocations(), "Not good syntax m'kay")); errors.add(new NonNullableFieldWasNullError(new NonNullableFieldWasNullException(mkExecutionInfo(), mkPath()))); diff --git 
a/src/test/groovy/example/http/HttpMain.java b/src/test/groovy/example/http/HttpMain.java index 4f3b7c8936..b823b78060 100644 --- a/src/test/groovy/example/http/HttpMain.java +++ b/src/test/groovy/example/http/HttpMain.java @@ -4,9 +4,6 @@ import graphql.ExecutionResult; import graphql.GraphQL; import graphql.StarWarsData; -import graphql.execution.instrumentation.ChainedInstrumentation; -import graphql.execution.instrumentation.Instrumentation; -import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation; import graphql.execution.instrumentation.tracing.TracingInstrumentation; import graphql.schema.DataFetcher; import graphql.schema.GraphQLObjectType; @@ -16,6 +13,9 @@ import graphql.schema.idl.SchemaGenerator; import graphql.schema.idl.SchemaParser; import graphql.schema.idl.TypeDefinitionRegistry; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.dataloader.BatchLoader; import org.dataloader.DataLoader; import org.dataloader.DataLoaderFactory; @@ -27,9 +27,6 @@ import org.eclipse.jetty.server.handler.HandlerList; import org.eclipse.jetty.server.handler.ResourceHandler; -import jakarta.servlet.ServletException; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -42,9 +39,7 @@ import java.util.concurrent.CompletableFuture; import static graphql.ExecutionInput.newExecutionInput; -import static graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentationOptions.newOptions; import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring; -import static java.util.Arrays.asList; /** * A very simple example of serving a graphql schema over http. 
@@ -140,18 +135,10 @@ private void handleStarWars(HttpServletRequest httpRequest, HttpServletResponse // you need a schema in order to execute queries GraphQLSchema schema = buildStarWarsSchema(); - DataLoaderDispatcherInstrumentation dlInstrumentation = - new DataLoaderDispatcherInstrumentation(newOptions().includeStatistics(true)); - - Instrumentation instrumentation = new ChainedInstrumentation( - asList(new TracingInstrumentation(), dlInstrumentation) - ); - // finally you build a runtime graphql object and execute the query GraphQL graphQL = GraphQL .newGraphQL(schema) - // instrumentation is pluggable - .instrumentation(instrumentation) + .instrumentation(new TracingInstrumentation()) .build(); ExecutionResult executionResult = graphQL.execute(executionInput); diff --git a/src/test/groovy/graphql/AssertTest.groovy b/src/test/groovy/graphql/AssertTest.groovy index 074c8b9b67..fe6d818f1f 100644 --- a/src/test/groovy/graphql/AssertTest.groovy +++ b/src/test/groovy/graphql/AssertTest.groovy @@ -2,34 +2,45 @@ package graphql import spock.lang.Specification +import static graphql.Assert.* + class AssertTest extends Specification { - def "assertNull should not throw on none null value"() { + def "assertNotNull should not throw on none null value"() { when: - Assert.assertNotNull("some object") + assertNotNull("some object") then: noExceptionThrown() } - def "assertNull should throw on null value"() { + def "assertNotNull should throw on null value"() { when: - Assert.assertNotNull(null) + assertNotNull(null) then: thrown(AssertException) } - def "assertNull with error message should not throw on none null value"() { + def "assertNotNull constant message should throw on null value"() { + when: + assertNotNull(null, "constant message") + + then: + def error = thrown(AssertException) + error.message == "constant message" + } + + def "assertNotNull with error message should not throw on none null value"() { when: - Assert.assertNotNull("some object", { -> "error 
message"}) + assertNotNull("some object", { -> "error message" }) then: noExceptionThrown() } - def "assertNull with error message should throw on null value with formatted message"() { + def "assertNotNull with error message should throw on null value with formatted message"() { when: - Assert.assertNotNull(value, { -> String.format(format, arg) }) + assertNotNull(value, { -> String.format(format, arg) }) then: def error = thrown(AssertException) @@ -42,9 +53,38 @@ class AssertTest extends Specification { null | "code" | null || "code" } + def "assertNotNull with different number of error args throws assertions"() { + when: + toRun.run() + + then: + def error = thrown(AssertException) + error.message == expectedMessage + + where: + toRun | expectedMessage + runnable({ assertNotNull(null, "error %s", "arg1") }) | "error arg1" + runnable({ assertNotNull(null, "error %s %s", "arg1", "arg2") }) | "error arg1 arg2" + runnable({ assertNotNull(null, "error %s %s %s", "arg1", "arg2", "arg3") }) | "error arg1 arg2 arg3" + } + + def "assertNotNull with different number of error args with non null does not throw assertions"() { + when: + toRun.run() + + then: + noExceptionThrown() + + where: + toRun | expectedMessage + runnable({ assertNotNull("x", "error %s", "arg1") }) | "error arg1" + runnable({ assertNotNull("x", "error %s %s", "arg1", "arg2") }) | "error arg1 arg2" + runnable({ assertNotNull("x", "error %s %s %s", "arg1", "arg2", "arg3") }) | "error arg1 arg2 arg3" + } + def "assertNeverCalled should always throw"() { when: - Assert.assertNeverCalled() + assertNeverCalled() then: def e = thrown(AssertException) @@ -53,7 +93,7 @@ class AssertTest extends Specification { def "assertShouldNeverHappen should always throw"() { when: - Assert.assertShouldNeverHappen() + assertShouldNeverHappen() then: def e = thrown(AssertException) @@ -62,7 +102,7 @@ class AssertTest extends Specification { def "assertShouldNeverHappen should always throw with formatted message"() { when: - 
Assert.assertShouldNeverHappen(format, arg) + assertShouldNeverHappen(format, arg) then: def error = thrown(AssertException) @@ -77,7 +117,7 @@ class AssertTest extends Specification { def "assertNotEmpty collection should throw on null or empty"() { when: - Assert.assertNotEmpty(value, { -> String.format(format, arg) }) + assertNotEmpty(value, { -> String.format(format, arg) }) then: def error = thrown(AssertException) @@ -91,7 +131,7 @@ class AssertTest extends Specification { def "assertNotEmpty should not throw on none empty collection"() { when: - Assert.assertNotEmpty(["some object"], { -> "error message"}) + assertNotEmpty(["some object"], { -> "error message" }) then: noExceptionThrown() @@ -99,7 +139,7 @@ class AssertTest extends Specification { def "assertTrue should not throw on true value"() { when: - Assert.assertTrue(true, { ->"error message"}) + assertTrue(true, { -> "error message" }) then: noExceptionThrown() @@ -107,7 +147,77 @@ class AssertTest extends Specification { def "assertTrue with error message should throw on false value with formatted message"() { when: - Assert.assertTrue(false, { -> String.format(format, arg) }) + assertTrue(false, { -> String.format(format, arg) }) + + then: + def error = thrown(AssertException) + error.message == expectedMessage + + where: + format | arg || expectedMessage + "error %s" | "msg" || "error msg" + "code %d" | 1 || "code 1" + "code" | null || "code" + } + + def "assertTrue constant message should throw with message"() { + when: + assertTrue(false, "constant message") + + then: + def error = thrown(AssertException) + error.message == "constant message" + } + + def "assertTrue with different number of error args throws assertions"() { + when: + toRun.run() + + then: + def error = thrown(AssertException) + error.message == expectedMessage + + where: + toRun | expectedMessage + runnable({ assertTrue(false, "error %s", "arg1") }) | "error arg1" + runnable({ assertTrue(false, "error %s %s", "arg1", "arg2") }) 
| "error arg1 arg2" + runnable({ assertTrue(false, "error %s %s %s", "arg1", "arg2", "arg3") }) | "error arg1 arg2 arg3" + } + + def "assertTrue with different number of error args but false does not throw assertions"() { + when: + toRun.run() + + then: + noExceptionThrown() + + where: + toRun | expectedMessage + runnable({ assertTrue(true, "error %s", "arg1") }) | "error arg1" + runnable({ assertTrue(true, "error %s %s", "arg1", "arg2") }) | "error arg1 arg2" + runnable({ assertTrue(true, "error %s %s %s", "arg1", "arg2", "arg3") }) | "error arg1 arg2 arg3" + } + + def "assertFalse should throw"() { + when: + assertFalse(true) + + then: + thrown(AssertException) + } + + def "assertFalse constant message should throw with message"() { + when: + assertFalse(true, "constant message") + + then: + def error = thrown(AssertException) + error.message == "constant message" + } + + def "assertFalse with error message should throw on false value with formatted message"() { + when: + assertFalse(true, { -> String.format(format, arg) }) then: def error = thrown(AssertException) @@ -120,9 +230,38 @@ class AssertTest extends Specification { "code" | null || "code" } + def "assertFalse with different number of error args throws assertions"() { + when: + toRun.run() + + then: + def error = thrown(AssertException) + error.message == expectedMessage + + where: + toRun | expectedMessage + runnable({ assertFalse(true, "error %s", "arg1") }) | "error arg1" + runnable({ assertFalse(true, "error %s %s", "arg1", "arg2") }) | "error arg1 arg2" + runnable({ assertFalse(true, "error %s %s %s", "arg1", "arg2", "arg3") }) | "error arg1 arg2 arg3" + } + + def "assertFalse with different number of error args but false does not throw assertions"() { + when: + toRun.run() + + then: + noExceptionThrown() + + where: + toRun | expectedMessage + runnable({ assertFalse(false, "error %s", "arg1") }) | "error arg1" + runnable({ assertFalse(false, "error %s %s", "arg1", "arg2") }) | "error arg1 arg2" + 
runnable({ assertFalse(false, "error %s %s %s", "arg1", "arg2", "arg3") }) | "error arg1 arg2 arg3" + } + def "assertValidName should not throw on valid names"() { when: - Assert.assertValidName(name) + assertValidName(name) then: noExceptionThrown() @@ -138,7 +277,7 @@ class AssertTest extends Specification { def "assertValidName should throw on invalid names"() { when: - Assert.assertValidName(name) + assertValidName(name) then: def error = thrown(AssertException) @@ -150,4 +289,10 @@ class AssertTest extends Specification { "���" | _ "_()" | _ } + + // Spock data tables can't cope with { x } syntax but it can do this + Runnable runnable(Runnable r) { + return r + } + } diff --git a/src/test/groovy/graphql/ErrorsTest.groovy b/src/test/groovy/graphql/ErrorsTest.groovy index dfae441761..e346a26b47 100644 --- a/src/test/groovy/graphql/ErrorsTest.groovy +++ b/src/test/groovy/graphql/ErrorsTest.groovy @@ -43,9 +43,21 @@ class ErrorsTest extends Specification { def "ValidationError equals and hashcode works"() { expect: - def same1 = new ValidationError(ValidationErrorType.BadValueForDefaultArg, [src(15, 34), src(23, 567)], "bad ju ju") - def same2 = new ValidationError(ValidationErrorType.BadValueForDefaultArg, [src(15, 34), src(23, 567)], "bad ju ju") - def different1 = new ValidationError(ValidationErrorType.FieldsConflict, [src(15, 34), src(23, 567)], "bad ju ju") + def same1 = ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.BadValueForDefaultArg) + .sourceLocations([src(15, 34), src(23, 567)]) + .description("bad ju ju") + .build() + def same2 = ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.BadValueForDefaultArg) + .sourceLocations([src(15, 34), src(23, 567)]) + .description("bad ju ju") + .build() + def different1 = ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.FieldsConflict) + .sourceLocations([src(15, 34), src(23, 567)]) + .description("bad ju ju") + .build() 
commonAssert(same1, same2, different1) } diff --git a/src/test/groovy/graphql/ExecutionInputTest.groovy b/src/test/groovy/graphql/ExecutionInputTest.groovy index f0b4b232fc..b3243e0d26 100644 --- a/src/test/groovy/graphql/ExecutionInputTest.groovy +++ b/src/test/groovy/graphql/ExecutionInputTest.groovy @@ -45,23 +45,6 @@ class ExecutionInputTest extends Specification { executionInput.graphQLContext.get("a") == "b" } - def "legacy context methods work"() { - // Retaining deprecated method tests for coverage - when: - def executionInput = ExecutionInput.newExecutionInput().query(query) - .context({ builder -> builder.of("k1", "v1") } as UnaryOperator) // Retain deprecated for test coverage - .build() - then: - (executionInput.context as GraphQLContext).get("k1") == "v1" // Retain deprecated for test coverage - - when: - executionInput = ExecutionInput.newExecutionInput().query(query) - .context(GraphQLContext.newContext().of("k2", "v2")) // Retain deprecated for test coverage - .build() - then: - (executionInput.context as GraphQLContext).get("k2") == "v2" // Retain deprecated for test coverage - } - def "legacy context is defaulted"() { // Retaining deprecated method tests for coverage when: diff --git a/src/test/groovy/graphql/GraphQLContextTest.groovy b/src/test/groovy/graphql/GraphQLContextTest.groovy index f409721363..8eebb17653 100644 --- a/src/test/groovy/graphql/GraphQLContextTest.groovy +++ b/src/test/groovy/graphql/GraphQLContextTest.groovy @@ -168,6 +168,52 @@ class GraphQLContextTest extends Specification { !context.hasKey("k3") } + def "compute works"() { + def context + when: + context = buildContext([k1: "foo"]) + then: + context.compute("k1", (k, v) -> v ? v + "bar" : "default") == "foobar" + context.get("k1") == "foobar" + context.compute("k2", (k, v) -> v ? 
"new" : "default") == "default" + context.get("k2") == "default" + !context.compute("k3", (k, v) -> null) + !context.hasKey("k3") + sizeOf(context) == 2 + } + + def "computeIfAbsent works"() { + def context + when: + context = buildContext([k1: "v1", k2: "v2"]) + then: + context.computeIfAbsent("k1", k -> "default") == "v1" + context.get("k1") == "v1" + context.computeIfAbsent("k2", k -> null) == "v2" + context.get("k2") == "v2" + context.computeIfAbsent("k3", k -> "default") == "default" + context.get("k3") == "default" + !context.computeIfAbsent("k4", k -> null) + !context.hasKey("k4") + sizeOf(context) == 3 + } + + def "computeIfPresent works"() { + def context + when: + context = buildContext([k1: "foo", k2: "v2"]) + then: + context.computeIfPresent("k1", (k, v) -> v + "bar") == "foobar" + context.get("k1") == "foobar" + !context.computeIfPresent("k2", (k, v) -> null) + !context.hasKey("k2") + !context.computeIfPresent("k3", (k, v) -> v + "bar") + !context.hasKey("k3") + !context.computeIfPresent("k4", (k, v) -> null) + !context.hasKey("k4") + sizeOf(context) == 1 + } + def "getOrDefault works"() { def context when: diff --git a/src/test/groovy/graphql/GraphQLErrorTest.groovy b/src/test/groovy/graphql/GraphQLErrorTest.groovy index 7560419be8..ca9fc1e8d7 100644 --- a/src/test/groovy/graphql/GraphQLErrorTest.groovy +++ b/src/test/groovy/graphql/GraphQLErrorTest.groovy @@ -25,7 +25,11 @@ class GraphQLErrorTest extends Specification { where: gError | expectedMap - new ValidationError(ValidationErrorType.UnknownType, mkLocations(), "Test ValidationError") | + ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.UnknownType) + .sourceLocations(mkLocations()) + .description("Test ValidationError") + .build() | [ locations: [[line: 666, column: 999], [line: 333, column: 0]], message : "Test ValidationError", diff --git a/src/test/groovy/graphql/GraphQLTest.groovy b/src/test/groovy/graphql/GraphQLTest.groovy index ec2523d3f8..8235ac5235 100644 
--- a/src/test/groovy/graphql/GraphQLTest.groovy +++ b/src/test/groovy/graphql/GraphQLTest.groovy @@ -13,12 +13,13 @@ import graphql.execution.ExecutionId import graphql.execution.ExecutionIdProvider import graphql.execution.ExecutionStrategyParameters import graphql.execution.MissingRootTypeException +import graphql.execution.ResultNodesInfo import graphql.execution.SubscriptionExecutionStrategy import graphql.execution.ValueUnboxer -import graphql.execution.instrumentation.ChainedInstrumentation import graphql.execution.instrumentation.Instrumentation +import graphql.execution.instrumentation.InstrumentationState import graphql.execution.instrumentation.SimplePerformantInstrumentation -import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation +import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters import graphql.execution.preparsed.NoOpPreparsedDocumentProvider import graphql.language.SourceLocation import graphql.schema.DataFetcher @@ -49,6 +50,7 @@ import static graphql.ExecutionInput.Builder import static graphql.ExecutionInput.newExecutionInput import static graphql.Scalars.GraphQLInt import static graphql.Scalars.GraphQLString +import static graphql.execution.ResultNodesInfo.MAX_RESULT_NODES import static graphql.schema.GraphQLArgument.newArgument import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition import static graphql.schema.GraphQLInputObjectField.newInputObjectField @@ -950,8 +952,8 @@ class GraphQLTest extends Specification { then: result == [hello: 'world'] queryStrategy.executionId == hello - queryStrategy.instrumentation instanceof ChainedInstrumentation - (queryStrategy.instrumentation as ChainedInstrumentation).getInstrumentations().contains(instrumentation) + queryStrategy.instrumentation instanceof Instrumentation + queryStrategy.instrumentation == instrumentation when: @@ -973,12 +975,11 @@ class GraphQLTest extends Specification { then: result == [hello: 'world'] 
queryStrategy.executionId == goodbye - queryStrategy.instrumentation instanceof ChainedInstrumentation - (queryStrategy.instrumentation as ChainedInstrumentation).getInstrumentations().contains(newInstrumentation) - !(queryStrategy.instrumentation as ChainedInstrumentation).getInstrumentations().contains(instrumentation) + queryStrategy.instrumentation instanceof SimplePerformantInstrumentation + newGraphQL.instrumentation == newInstrumentation } - def "disabling data loader instrumentation leaves instrumentation as is"() { + def "provided instrumentation is unchanged"() { given: def queryStrategy = new CaptureStrategy() def instrumentation = new SimplePerformantInstrumentation() @@ -988,7 +989,6 @@ class GraphQLTest extends Specification { when: def graphql = builder - .doNotAddDefaultInstrumentations() .build() graphql.execute('{ hello }') @@ -996,38 +996,6 @@ class GraphQLTest extends Specification { queryStrategy.instrumentation == instrumentation } - def "a single DataLoader instrumentation leaves instrumentation as is"() { - given: - def queryStrategy = new CaptureStrategy() - def instrumentation = new DataLoaderDispatcherInstrumentation() - def builder = GraphQL.newGraphQL(simpleSchema()) - .queryExecutionStrategy(queryStrategy) - .instrumentation(instrumentation) - - when: - def graphql = builder - .build() - graphql.execute('{ hello }') - - then: - queryStrategy.instrumentation == instrumentation - } - - def "DataLoader instrumentation is the default instrumentation"() { - given: - def queryStrategy = new CaptureStrategy() - def builder = GraphQL.newGraphQL(simpleSchema()) - .queryExecutionStrategy(queryStrategy) - - when: - def graphql = builder - .build() - graphql.execute('{ hello }') - - then: - queryStrategy.instrumentation instanceof DataLoaderDispatcherInstrumentation - } - def "query with triple quoted multi line strings"() { given: def queryType = "Query" @@ -1063,6 +1031,28 @@ over many lines'''] } + def "executionId is set before being passed to 
instrumentation"() { + InstrumentationCreateStateParameters seenParams + + def instrumentation = new Instrumentation() { + + @Override + CompletableFuture createStateAsync(InstrumentationCreateStateParameters params) { + seenParams = params + null + } + } + + when: + GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) + .instrumentation(instrumentation) + .build() + .execute("{ __typename }") + + then: + seenParams.executionInput.executionId != null + } + def "variables map can't be null via ExecutionInput"() { given: @@ -1424,7 +1414,7 @@ many lines'''] graphQL.getIdProvider() == ExecutionIdProvider.DEFAULT_EXECUTION_ID_PROVIDER graphQL.getValueUnboxer() == ValueUnboxer.DEFAULT graphQL.getPreparsedDocumentProvider() == NoOpPreparsedDocumentProvider.INSTANCE - graphQL.getInstrumentation() instanceof ChainedInstrumentation + graphQL.getInstrumentation() instanceof Instrumentation graphQL.getQueryStrategy() instanceof AsyncExecutionStrategy graphQL.getMutationStrategy() instanceof AsyncSerialExecutionStrategy graphQL.getSubscriptionStrategy() instanceof SubscriptionExecutionStrategy @@ -1440,4 +1430,143 @@ many lines'''] then: !er.errors.isEmpty() } + + def "max result nodes not breached"() { + given: + def sdl = ''' + + type Query { + hello: String + } + ''' + def df = { env -> "world" } as DataFetcher + def fetchers = ["Query": ["hello": df]] + def schema = TestUtil.schema(sdl, fetchers) + def graphQL = GraphQL.newGraphQL(schema).build() + + def query = "{ hello h1: hello h2: hello h3: hello } " + def ei = newExecutionInput(query).build() + ei.getGraphQLContext().put(MAX_RESULT_NODES, 4); + + when: + def er = graphQL.execute(ei) + def rni = ei.getGraphQLContext().get(ResultNodesInfo.RESULT_NODES_INFO) as ResultNodesInfo + then: + !rni.maxResultNodesExceeded + rni.resultNodesCount == 4 + er.data == [hello: "world", h1: "world", h2: "world", h3: "world"] + } + + def "max result nodes breached"() { + given: + def sdl = ''' + + type Query { + hello: String + } + ''' + 
def df = { env -> "world" } as DataFetcher + def fetchers = ["Query": ["hello": df]] + def schema = TestUtil.schema(sdl, fetchers) + def graphQL = GraphQL.newGraphQL(schema).build() + + def query = "{ hello h1: hello h2: hello h3: hello } " + def ei = newExecutionInput(query).build() + ei.getGraphQLContext().put(MAX_RESULT_NODES, 3); + + when: + def er = graphQL.execute(ei) + def rni = ei.getGraphQLContext().get(ResultNodesInfo.RESULT_NODES_INFO) as ResultNodesInfo + then: + rni.maxResultNodesExceeded + rni.resultNodesCount == 4 + er.data == [hello: "world", h1: "world", h2: "world", h3: null] + } + + def "max result nodes breached with list"() { + given: + def sdl = ''' + + type Query { + hello: [String] + } + ''' + def df = { env -> ["w1", "w2", "w3"] } as DataFetcher + def fetchers = ["Query": ["hello": df]] + def schema = TestUtil.schema(sdl, fetchers) + def graphQL = GraphQL.newGraphQL(schema).build() + + def query = "{ hello}" + def ei = newExecutionInput(query).build() + ei.getGraphQLContext().put(MAX_RESULT_NODES, 3); + + when: + def er = graphQL.execute(ei) + def rni = ei.getGraphQLContext().get(ResultNodesInfo.RESULT_NODES_INFO) as ResultNodesInfo + then: + rni.maxResultNodesExceeded + rni.resultNodesCount == 4 + er.data == [hello: null] + } + + def "max result nodes breached with list 2"() { + given: + def sdl = ''' + + type Query { + hello: [Foo] + } + type Foo { + name: String + } + ''' + def df = { env -> [[name: "w1"], [name: "w2"], [name: "w3"]] } as DataFetcher + def fetchers = ["Query": ["hello": df]] + def schema = TestUtil.schema(sdl, fetchers) + def graphQL = GraphQL.newGraphQL(schema).build() + + def query = "{ hello {name}}" + def ei = newExecutionInput(query).build() + // we have 7 result nodes overall + ei.getGraphQLContext().put(MAX_RESULT_NODES, 6); + + when: + def er = graphQL.execute(ei) + def rni = ei.getGraphQLContext().get(ResultNodesInfo.RESULT_NODES_INFO) as ResultNodesInfo + then: + rni.resultNodesCount == 7 + 
rni.maxResultNodesExceeded + er.data == [hello: [[name: "w1"], [name: "w2"], [name: null]]] + } + + def "max result nodes not breached with list"() { + given: + def sdl = ''' + + type Query { + hello: [Foo] + } + type Foo { + name: String + } + ''' + def df = { env -> [[name: "w1"], [name: "w2"], [name: "w3"]] } as DataFetcher + def fetchers = ["Query": ["hello": df]] + def schema = TestUtil.schema(sdl, fetchers) + def graphQL = GraphQL.newGraphQL(schema).build() + + def query = "{ hello {name}}" + def ei = newExecutionInput(query).build() + // we have 7 result nodes overall + ei.getGraphQLContext().put(MAX_RESULT_NODES, 7); + + when: + def er = graphQL.execute(ei) + def rni = ei.getGraphQLContext().get(ResultNodesInfo.RESULT_NODES_INFO) as ResultNodesInfo + then: + !rni.maxResultNodesExceeded + rni.resultNodesCount == 7 + er.data == [hello: [[name: "w1"], [name: "w2"], [name: "w3"]]] + } + } diff --git a/src/test/groovy/graphql/GraphqlErrorHelperTest.groovy b/src/test/groovy/graphql/GraphqlErrorHelperTest.groovy index 34684b6ce6..88bfecb444 100644 --- a/src/test/groovy/graphql/GraphqlErrorHelperTest.groovy +++ b/src/test/groovy/graphql/GraphqlErrorHelperTest.groovy @@ -65,7 +65,11 @@ class GraphqlErrorHelperTest extends Specification { def "can turn error classifications into extensions"() { - def validationErr = new ValidationError(ValidationErrorType.InvalidFragmentType, new SourceLocation(6, 9), "Things are not valid") + def validationErr = ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.InvalidFragmentType) + .sourceLocation(new SourceLocation(6, 9)) + .description("Things are not valid") + .build() when: def specMap = GraphqlErrorHelper.toSpecification(validationErr) diff --git a/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy b/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy index b5813c4d0b..bb22d70461 100644 --- a/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy +++ 
b/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy @@ -893,8 +893,10 @@ class InterfacesImplementingInterfacesTest extends Specification { given: def graphQLSchema = createComplexSchema() + GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build() + when: - def result = GraphQL.newGraphQL(graphQLSchema).build().execute(""" + String query = """ { nodeType: __type(name: "Node") { possibleTypes { @@ -902,7 +904,20 @@ class InterfacesImplementingInterfacesTest extends Specification { name } } - resourceType: __type(name: "Resource") { + } + """ + def result = graphQL.execute(query) + + then: + !result.errors + result.data == [ + nodeType: [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']]], + ] + + when: + query = """ + { + resourceType: __type(name: "Resource") { possibleTypes { kind name @@ -911,22 +926,35 @@ class InterfacesImplementingInterfacesTest extends Specification { kind name } - } - imageType: __type(name: "Image") { + } + } + """ + result = graphQL.execute(query) + + then: + !result.errors + result.data == [ + resourceType: [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']], interfaces: [[kind: 'INTERFACE', name: 'Node']]] + ] + + when: + + query = """ + { + imageType: __type(name: "Image") { interfaces { kind name } } - } - """) + } + """ + result = graphQL.execute(query) then: !result.errors result.data == [ - nodeType : [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']]], imageType : [interfaces: [[kind: 'INTERFACE', name: 'Resource'], [kind: 'INTERFACE', name: 'Node']]], - resourceType: [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']], interfaces: [[kind: 'INTERFACE', name: 'Node']]] ] } diff --git a/src/test/groovy/graphql/Issue2068.groovy b/src/test/groovy/graphql/Issue2068.groovy index a111e425d1..3273eab2cf 100644 --- a/src/test/groovy/graphql/Issue2068.groovy +++ 
b/src/test/groovy/graphql/Issue2068.groovy @@ -1,7 +1,6 @@ package graphql -import graphql.execution.instrumentation.ChainedInstrumentation -import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation + import graphql.schema.DataFetcher import graphql.schema.DataFetchingEnvironment import graphql.schema.StaticDataFetcher @@ -12,7 +11,6 @@ import org.dataloader.DataLoader import org.dataloader.DataLoaderOptions import org.dataloader.DataLoaderRegistry import spock.lang.Specification -import spock.lang.Timeout import java.util.concurrent.CompletableFuture import java.util.concurrent.CompletionStage @@ -21,8 +19,8 @@ import java.util.concurrent.ThreadFactory import java.util.concurrent.ThreadPoolExecutor import java.util.concurrent.TimeUnit -import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring import static graphql.ExecutionInput.newExecutionInput +import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring class Issue2068 extends Specification { def "shouldn't hang on exception in resolveFieldWithInfo"() { @@ -95,7 +93,6 @@ class Issue2068 extends Specification { when: def graphql = GraphQL.newGraphQL(schema) - .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() DataLoaderRegistry dataLoaderRegistry = mkNewDataLoaderRegistry(executor) @@ -127,9 +124,6 @@ class Issue2068 extends Specification { when: graphql = GraphQL.newGraphQL(schema) - .instrumentation(new ChainedInstrumentation( - Collections.singletonList(new DataLoaderDispatcherInstrumentation())) - ) .build() graphql.execute(newExecutionInput() diff --git a/src/test/groovy/graphql/Issue3434.groovy b/src/test/groovy/graphql/Issue3434.groovy new file mode 100644 index 0000000000..4671c57ff8 --- /dev/null +++ b/src/test/groovy/graphql/Issue3434.groovy @@ -0,0 +1,26 @@ +package graphql + +import static graphql.schema.GraphQLUnionType.newUnionType +import static graphql.schema.GraphQLTypeReference.typeRef +import graphql.schema.idl.SchemaPrinter + +import 
spock.lang.Specification + +class Issue3434 extends Specification { + + def "allow printing of union types"() { + given: + def schema = newUnionType().name("Shape") + .possibleType(typeRef("Circle")) + .possibleType(typeRef("Square")) + .build() + + when: + def printer = new SchemaPrinter() + def result = printer.print(schema) + + then: + result.trim() == "union Shape = Circle | Square" + } +} + diff --git a/src/test/groovy/graphql/ScalarsBooleanTest.groovy b/src/test/groovy/graphql/ScalarsBooleanTest.groovy index a045d1249d..55351f7f2b 100644 --- a/src/test/groovy/graphql/ScalarsBooleanTest.groovy +++ b/src/test/groovy/graphql/ScalarsBooleanTest.groovy @@ -55,19 +55,19 @@ class ScalarsBooleanTest extends Specification { Scalars.GraphQLBoolean.getCoercing().serialize(value, GraphQLContext.default, Locale.default) == result where: - value | result - true | true - "false" | false - "true" | true - "True" | true - 0 | false - 1 | true - -1 | true - new Long(42345784398534785l) | true - new Double(42.3) | true - new Float(42.3) | true - Integer.MAX_VALUE + 1l | true - Integer.MIN_VALUE - 1l | true + value | result + true | true + "false" | false + "true" | true + "True" | true + 0 | false + 1 | true + -1 | true + Long.valueOf(42345784398534785l) | true + Double.valueOf(42.3) | true + Float.valueOf(42.3) | true + Integer.MAX_VALUE + 1l | true + Integer.MIN_VALUE - 1l | true } @Unroll @@ -76,19 +76,19 @@ class ScalarsBooleanTest extends Specification { Scalars.GraphQLBoolean.getCoercing().serialize(value) == result // Retain deprecated method for test coverage where: - value | result - true | true - "false" | false - "true" | true - "True" | true - 0 | false - 1 | true - -1 | true - new Long(42345784398534785l) | true - new Double(42.3) | true - new Float(42.3) | true - Integer.MAX_VALUE + 1l | true - Integer.MIN_VALUE - 1l | true + value | result + true | true + "false" | false + "true" | true + "True" | true + 0 | false + 1 | true + -1 | true + 
Long.valueOf(42345784398534785l) | true + Double.valueOf(42.3) | true + Float.valueOf(42.3) | true + Integer.MAX_VALUE + 1l | true + Integer.MIN_VALUE - 1l | true } @Unroll @@ -131,27 +131,6 @@ class ScalarsBooleanTest extends Specification { false | false } - @Unroll - def "parseValue parses non-Boolean input #value"() { - expect: - Scalars.GraphQLBoolean.getCoercing().parseValue(value, GraphQLContext.default, Locale.default) == result - - where: - value | result - true | true - "false" | false - "true" | true - "True" | true - 0 | false - 1 | true - -1 | true - new Long(42345784398534785l) | true - new Double(42.3) | true - new Float(42.3) | true - Integer.MAX_VALUE + 1l | true - Integer.MIN_VALUE - 1l | true - } - @Unroll def "parseValue throws exception for invalid input #value"() { when: @@ -160,8 +139,19 @@ class ScalarsBooleanTest extends Specification { thrown(CoercingParseValueException) where: - value | _ - new Object() | _ + value | _ + new Object() | _ + "false" | _ + "true" | _ + "True" | _ + 0 | _ + 1 | _ + -1 | _ + Long.valueOf(42345784398534785l) | _ + Double.valueOf(42.3) | _ + Float.valueOf(42.3) | _ + Integer.MAX_VALUE + 1l | _ + Integer.MIN_VALUE - 1l | _ } } diff --git a/src/test/groovy/graphql/ScalarsFloatTest.groovy b/src/test/groovy/graphql/ScalarsFloatTest.groovy index 18846f788e..744fcbac03 100644 --- a/src/test/groovy/graphql/ScalarsFloatTest.groovy +++ b/src/test/groovy/graphql/ScalarsFloatTest.groovy @@ -64,15 +64,15 @@ class ScalarsFloatTest extends Specification { "42" | 42d "42.123" | 42.123d 42.0000d | 42 - new Integer(42) | 42 + Integer.valueOf(42) | 42 "-1" | -1 new BigInteger("42") | 42 new BigDecimal("42") | 42 new BigDecimal("4.2") | 4.2d 42.3f | 42.3d 42.0d | 42d - new Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567d new AtomicInteger(42) | 42 Double.MAX_VALUE | Double.MAX_VALUE @@ -89,15 +89,15 @@ class ScalarsFloatTest extends Specification { "42" | 42d "42.123" 
| 42.123d 42.0000d | 42 - new Integer(42) | 42 + Integer.valueOf(42) | 42 "-1" | -1 new BigInteger("42") | 42 new BigDecimal("42") | 42 new BigDecimal("4.2") | 4.2d 42.3f | 42.3d 42.0d | 42d - new Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567d new AtomicInteger(42) | 42 Double.MAX_VALUE | Double.MAX_VALUE @@ -137,21 +137,18 @@ class ScalarsFloatTest extends Specification { where: value | result 42.0000d | 42 - new Integer(42) | 42 + Integer.valueOf(42) | 42 new BigInteger("42") | 42 new BigDecimal("42") | 42 new BigDecimal("4.2") | 4.2d 42.3f | 42.3d 42.0d | 42d - new Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567d new AtomicInteger(42) | 42 Double.MAX_VALUE | Double.MAX_VALUE Double.MIN_VALUE | Double.MIN_VALUE - "42" | 42d - "42.123" | 42.123d - "-1" | -1 } @Unroll @@ -162,21 +159,18 @@ class ScalarsFloatTest extends Specification { where: value | result 42.0000d | 42 - new Integer(42) | 42 + Integer.valueOf(42) | 42 new BigInteger("42") | 42 new BigDecimal("42") | 42 new BigDecimal("4.2") | 4.2d 42.3f | 42.3d 42.0d | 42d - new Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567d new AtomicInteger(42) | 42 Double.MAX_VALUE | Double.MAX_VALUE Double.MIN_VALUE | Double.MIN_VALUE - "42" | 42d - "42.123" | 42.123d - "-1" | -1 } @@ -203,6 +197,9 @@ class ScalarsFloatTest extends Specification { Float.POSITIVE_INFINITY.toString() | _ Float.NEGATIVE_INFINITY | _ Float.NEGATIVE_INFINITY.toString() | _ + "42" | _ + "42.123" | _ + "-1" | _ } } diff --git a/src/test/groovy/graphql/ScalarsIntTest.groovy b/src/test/groovy/graphql/ScalarsIntTest.groovy index a38de1a49f..42c7c6887f 100644 --- a/src/test/groovy/graphql/ScalarsIntTest.groovy +++ b/src/test/groovy/graphql/ScalarsIntTest.groovy @@ -66,14 +66,14 @@ class ScalarsIntTest extends Specification { "42" | 42 "42.0000" | 42 42.0000d | 42 
- new Integer(42) | 42 + Integer.valueOf(42) | 42 "-1" | -1 new BigInteger("42") | 42 new BigDecimal("42") | 42 42.0f | 42 42.0d | 42 - new Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567 new AtomicInteger(42) | 42 Integer.MAX_VALUE | Integer.MAX_VALUE @@ -90,14 +90,14 @@ class ScalarsIntTest extends Specification { "42" | 42 "42.0000" | 42 42.0000d | 42 - new Integer(42) | 42 + Integer.valueOf(42) | 42 "-1" | -1 new BigInteger("42") | 42 new BigDecimal("42") | 42 42.0f | 42 42.0d | 42 - new Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567 new AtomicInteger(42) | 42 Integer.MAX_VALUE | Integer.MAX_VALUE @@ -112,16 +112,16 @@ class ScalarsIntTest extends Specification { thrown(CoercingSerializeException) where: - value | _ - "" | _ - "not a number " | _ - "42.3" | _ - new Long(42345784398534785l) | _ - new Double(42.3) | _ - new Float(42.3) | _ - Integer.MAX_VALUE + 1l | _ - Integer.MIN_VALUE - 1l | _ - new Object() | _ + value | _ + "" | _ + "not a number " | _ + "42.3" | _ + Long.valueOf(42345784398534785l) | _ + Double.valueOf(42.3) | _ + Float.valueOf(42.3) | _ + Integer.MAX_VALUE + 1l | _ + Integer.MIN_VALUE - 1l | _ + new Object() | _ } @Unroll @@ -131,10 +131,10 @@ class ScalarsIntTest extends Specification { where: value | result - new Integer(42) | 42 + Integer.valueOf(42) | 42 new BigInteger("42") | 42 - new Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567 new AtomicInteger(42) | 42 42.0000d | 42 @@ -143,9 +143,6 @@ class ScalarsIntTest extends Specification { 42.0d | 42 Integer.MAX_VALUE | Integer.MAX_VALUE Integer.MIN_VALUE | Integer.MIN_VALUE - "42" | 42 - "42.0000" | 42 - "-1" | -1 } @Unroll @@ -155,10 +152,10 @@ class ScalarsIntTest extends Specification { where: value | result - new Integer(42) | 42 + Integer.valueOf(42) | 42 new BigInteger("42") | 42 - new 
Byte("42") | 42 - new Short("42") | 42 + Byte.valueOf("42") | 42 + Short.valueOf("42") | 42 1234567l | 1234567 new AtomicInteger(42) | 42 42.0000d | 42 @@ -167,9 +164,6 @@ class ScalarsIntTest extends Specification { 42.0d | 42 Integer.MAX_VALUE | Integer.MAX_VALUE Integer.MIN_VALUE | Integer.MIN_VALUE - "42" | 42 - "42.0000" | 42 - "-1" | -1 } @Unroll @@ -180,16 +174,19 @@ class ScalarsIntTest extends Specification { thrown(CoercingParseValueException) where: - value | _ - "" | _ - "not a number " | _ - "42.3" | _ - new Long(42345784398534785l) | _ - new Double(42.3) | _ - new Float(42.3) | _ - Integer.MAX_VALUE + 1l | _ - Integer.MIN_VALUE - 1l | _ - new Object() | _ + value | _ + "" | _ + "not a number " | _ + "42.3" | _ + Long.valueOf(42345784398534785l) | _ + Double.valueOf(42.3) | _ + Float.valueOf(42.3) | _ + Integer.MAX_VALUE + 1l | _ + Integer.MIN_VALUE - 1l | _ + new Object() | _ + "42" | _ + "42.0000" | _ + "-1" | _ } } diff --git a/src/test/groovy/graphql/ScalarsStringTest.groovy b/src/test/groovy/graphql/ScalarsStringTest.groovy index 536dd07216..cbd00f97d9 100644 --- a/src/test/groovy/graphql/ScalarsStringTest.groovy +++ b/src/test/groovy/graphql/ScalarsStringTest.groovy @@ -4,6 +4,7 @@ import graphql.execution.CoercedVariables import graphql.language.BooleanValue import graphql.language.StringValue import graphql.schema.CoercingParseLiteralException +import graphql.schema.CoercingParseValueException import spock.lang.Shared import spock.lang.Specification import spock.lang.Unroll @@ -85,15 +86,24 @@ class ScalarsStringTest extends Specification { } @Unroll - def "String parseValue can parse non-String values"() { - expect: - Scalars.GraphQLString.getCoercing().parseValue(value, GraphQLContext.default, Locale.default) == result + def "String parseValue throws exception for non-String values"() { + when: + Scalars.GraphQLString.getCoercing().parseValue(value, GraphQLContext.default, Locale.default) + then: + thrown(CoercingParseValueException) where: - 
value | result - 123 | "123" - true | "true" - customObject | "foo" + value | _ + 123 | _ + true | _ + customObject | _ } + def "String parseValue English exception message"() { + when: + Scalars.GraphQLString.getCoercing().parseValue(9001, GraphQLContext.default, Locale.ENGLISH) + then: + def ex = thrown(CoercingParseValueException) + ex.message == "Expected a String input, but it was a 'Integer'" + } } diff --git a/src/test/groovy/graphql/TestUtil.groovy b/src/test/groovy/graphql/TestUtil.groovy index 35a2ae68b2..490e7cee93 100644 --- a/src/test/groovy/graphql/TestUtil.groovy +++ b/src/test/groovy/graphql/TestUtil.groovy @@ -2,6 +2,7 @@ package graphql import graphql.execution.MergedField import graphql.execution.MergedSelectionSet +import graphql.introspection.Introspection.DirectiveLocation import graphql.language.Document import graphql.language.Field import graphql.language.NullValue @@ -12,8 +13,8 @@ import graphql.language.Type import graphql.parser.Parser import graphql.schema.Coercing import graphql.schema.DataFetcher -import graphql.schema.GraphQLAppliedDirectiveArgument import graphql.schema.GraphQLAppliedDirective +import graphql.schema.GraphQLAppliedDirectiveArgument import graphql.schema.GraphQLArgument import graphql.schema.GraphQLDirective import graphql.schema.GraphQLInputType @@ -194,13 +195,19 @@ class TestUtil { .name(definition.getName()) .description(definition.getDescription() == null ? 
null : definition.getDescription().getContent()) .coercing(mockCoercing()) - .replaceDirectives(definition.getDirectives().stream().map({ mockDirective(it.getName()) }).collect(Collectors.toList())) + .replaceDirectives( + definition.getDirectives() + .stream() + .map({ mkDirective(it.getName(), DirectiveLocation.SCALAR) }) + .collect(Collectors.toList())) .definition(definition) .build() } - static GraphQLDirective mockDirective(String name) { - newDirective().name(name).description(name).build() + static GraphQLDirective mkDirective(String name, DirectiveLocation location, GraphQLArgument arg = null) { + def b = newDirective().name(name).description(name).validLocation(location) + if (arg != null) b.argument(arg) + b.build() } static TypeRuntimeWiring mockTypeRuntimeWiring(String typeName, boolean withResolver) { diff --git a/src/test/groovy/graphql/UnionTest.groovy b/src/test/groovy/graphql/UnionTest.groovy index 403f31d3d8..8edd7b2600 100644 --- a/src/test/groovy/graphql/UnionTest.groovy +++ b/src/test/groovy/graphql/UnionTest.groovy @@ -4,7 +4,7 @@ import spock.lang.Specification class UnionTest extends Specification { - def "can introspect on union and intersection types"() { + def "can introspect on union types"() { def query = """ { Named: __type(name: "Named") { @@ -15,15 +15,6 @@ class UnionTest extends Specification { possibleTypes { name } enumValues { name } inputFields { name } - } - Pet: __type(name: "Pet") { - kind - name - fields { name } - interfaces { name } - possibleTypes { name } - enumValues { name } - inputFields { name } } } """ @@ -42,8 +33,32 @@ class UnionTest extends Specification { ], enumValues : null, inputFields : null - ], - Pet : [ + ]] + when: + def executionResult = GraphQL.newGraphQL(GarfieldSchema.GarfieldSchema).build().execute(query) + + then: + executionResult.data == expectedResult + + + } + + def "can introspect on intersection types"() { + def query = """ + { + Pet: __type(name: "Pet") { + kind + name + fields { name } + 
interfaces { name } + possibleTypes { name } + enumValues { name } + inputFields { name } + } + } + """ + + def expectedResult = [Pet : [ kind : 'UNION', name : 'Pet', fields : null, diff --git a/src/test/groovy/graphql/analysis/MaxQueryComplexityInstrumentationTest.groovy b/src/test/groovy/graphql/analysis/MaxQueryComplexityInstrumentationTest.groovy index cee882ccae..798b9e5512 100644 --- a/src/test/groovy/graphql/analysis/MaxQueryComplexityInstrumentationTest.groovy +++ b/src/test/groovy/graphql/analysis/MaxQueryComplexityInstrumentationTest.groovy @@ -167,7 +167,7 @@ class MaxQueryComplexityInstrumentationTest extends Specification { private InstrumentationExecuteOperationParameters createExecuteOperationParameters(MaxQueryComplexityInstrumentation queryComplexityInstrumentation, ExecutionInput executionInput, Document query, GraphQLSchema schema, InstrumentationState state) { // we need to run N steps to create instrumentation state - def validationParameters = new InstrumentationValidationParameters(executionInput, query, schema, state) + def validationParameters = new InstrumentationValidationParameters(executionInput, query, schema) queryComplexityInstrumentation.beginValidation(validationParameters, state) def executionContext = executionCtx(executionInput, query, schema) def executeOperationParameters = new InstrumentationExecuteOperationParameters(executionContext) @@ -175,7 +175,7 @@ class MaxQueryComplexityInstrumentationTest extends Specification { } def createInstrumentationState(MaxQueryComplexityInstrumentation queryComplexityInstrumentation) { - queryComplexityInstrumentation.createState(null) + queryComplexityInstrumentation.createStateAsync(null).join() } diff --git a/src/test/groovy/graphql/analysis/MaxQueryDepthInstrumentationTest.groovy b/src/test/groovy/graphql/analysis/MaxQueryDepthInstrumentationTest.groovy index 942bee10dc..acc1a08983 100644 --- a/src/test/groovy/graphql/analysis/MaxQueryDepthInstrumentationTest.groovy +++ 
b/src/test/groovy/graphql/analysis/MaxQueryDepthInstrumentationTest.groovy @@ -67,7 +67,7 @@ class MaxQueryDepthInstrumentationTest extends Specification { ExecutionInput executionInput = Mock(ExecutionInput) def executionContext = executionCtx(executionInput, query, schema) def executeOperationParameters = new InstrumentationExecuteOperationParameters(executionContext) - def state = maximumQueryDepthInstrumentation.createState(null) + def state = null // it has not state in implementation when: maximumQueryDepthInstrumentation.beginExecuteOperation(executeOperationParameters, state) then: diff --git a/src/test/groovy/graphql/execution/AsyncExecutionStrategyTest.groovy b/src/test/groovy/graphql/execution/AsyncExecutionStrategyTest.groovy index 951246df0c..8769bb79a4 100644 --- a/src/test/groovy/graphql/execution/AsyncExecutionStrategyTest.groovy +++ b/src/test/groovy/graphql/execution/AsyncExecutionStrategyTest.groovy @@ -2,6 +2,7 @@ package graphql.execution import graphql.ErrorType import graphql.ExecutionResult +import graphql.ExperimentalApi import graphql.GraphQLContext import graphql.execution.instrumentation.ExecutionStrategyInstrumentationContext import graphql.execution.instrumentation.InstrumentationState @@ -22,6 +23,7 @@ import java.util.concurrent.CompletionException import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.locks.ReentrantLock +import static graphql.ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT import static graphql.Scalars.GraphQLString import static graphql.TestUtil.mergedField import static graphql.TestUtil.mergedSelectionSet @@ -30,7 +32,10 @@ import static graphql.schema.GraphQLObjectType.newObject import static graphql.schema.GraphQLSchema.newSchema import static org.awaitility.Awaitility.await -class AsyncExecutionStrategyTest extends Specification { +abstract class AsyncExecutionStrategyTest extends Specification { + static boolean incrementalSupport + + def graphqlContextMock = Mock(GraphQLContext) 
GraphQLSchema schema(DataFetcher dataFetcher1, DataFetcher dataFetcher2) { def queryName = "RootQueryType" @@ -65,6 +70,10 @@ class AsyncExecutionStrategyTest extends Specification { schema } + def setup() { + graphqlContextMock.get(ENABLE_INCREMENTAL_SUPPORT) >> incrementalSupport + } + def "execution is serial if the dataFetchers are blocking"() { given: def lock = new ReentrantLock() @@ -98,7 +107,7 @@ class AsyncExecutionStrategyTest extends Specification { .operationDefinition(operation) .instrumentation(SimplePerformantInstrumentation.INSTANCE) .valueUnboxer(ValueUnboxer.DEFAULT) - .graphQLContext(GraphQLContext.getDefault()) + .graphQLContext(graphqlContextMock) .locale(Locale.getDefault()) .build() ExecutionStrategyParameters executionStrategyParameters = ExecutionStrategyParameters @@ -140,7 +149,7 @@ class AsyncExecutionStrategyTest extends Specification { .valueUnboxer(ValueUnboxer.DEFAULT) .instrumentation(SimplePerformantInstrumentation.INSTANCE) .locale(Locale.getDefault()) - .graphQLContext(GraphQLContext.getDefault()) + .graphQLContext(graphqlContextMock) .build() ExecutionStrategyParameters executionStrategyParameters = ExecutionStrategyParameters .newParameters() @@ -182,7 +191,7 @@ class AsyncExecutionStrategyTest extends Specification { .operationDefinition(operation) .valueUnboxer(ValueUnboxer.DEFAULT) .instrumentation(SimplePerformantInstrumentation.INSTANCE) - .graphQLContext(GraphQLContext.getDefault()) + .graphQLContext(graphqlContextMock) .locale(Locale.getDefault()) .build() ExecutionStrategyParameters executionStrategyParameters = ExecutionStrategyParameters @@ -225,7 +234,7 @@ class AsyncExecutionStrategyTest extends Specification { .instrumentation(SimplePerformantInstrumentation.INSTANCE) .valueUnboxer(ValueUnboxer.DEFAULT) .locale(Locale.getDefault()) - .graphQLContext(GraphQLContext.getDefault()) + .graphQLContext(graphqlContextMock) .build() ExecutionStrategyParameters executionStrategyParameters = ExecutionStrategyParameters 
.newParameters() @@ -264,7 +273,7 @@ class AsyncExecutionStrategyTest extends Specification { .executionId(ExecutionId.generate()) .operationDefinition(operation) .valueUnboxer(ValueUnboxer.DEFAULT) - .graphQLContext(GraphQLContext.getDefault()) + .graphQLContext(graphqlContextMock) .locale(Locale.getDefault()) .instrumentation(new SimplePerformantInstrumentation() { @@ -278,7 +287,7 @@ class AsyncExecutionStrategyTest extends Specification { } @Override - void onDispatched(CompletableFuture result) { + void onDispatched() { } @Override @@ -311,3 +320,15 @@ class AsyncExecutionStrategyTest extends Specification { } + +class AsyncExecutionStrategyTestWithIncrementalSupport extends AsyncExecutionStrategyTest { + static { + incrementalSupport = true + } +} + +class AsyncExecutionStrategyTestNoIncrementalSupport extends AsyncExecutionStrategyTest { + static { + incrementalSupport = false + } +} diff --git a/src/test/groovy/graphql/execution/AsyncTest.groovy b/src/test/groovy/graphql/execution/AsyncTest.groovy index e124f00220..2661c4f5fd 100644 --- a/src/test/groovy/graphql/execution/AsyncTest.groovy +++ b/src/test/groovy/graphql/execution/AsyncTest.groovy @@ -1,11 +1,12 @@ +//file:noinspection GroovyVariableNotAssigned package graphql.execution import spock.lang.Specification import java.util.concurrent.CompletableFuture import java.util.concurrent.CompletionException -import java.util.function.Function import java.util.function.BiFunction +import java.util.function.Function import static java.util.concurrent.CompletableFuture.completedFuture @@ -48,6 +49,43 @@ class AsyncTest extends Specification { result.get() == ['x', 'y', 'z'] } + def "eachSequentially polymorphic test"() { + given: + def input = ['a', 'b', 'c', 'd'] + def cfFactory = Mock(BiFunction) + def cf1 = new CompletableFuture() + def v2 = 'y' + def cf3 = new CompletableFuture() + + when: + def result = Async.eachSequentially(input, cfFactory) + + then: + !result.isDone() + 1 * cfFactory.apply('a', []) >> 
cf1 + + when: + cf1.complete('x') + + then: + !result.isDone() + 1 * cfFactory.apply('b', ['x']) >> v2 + + when: + + then: + !result.isDone() + 1 * cfFactory.apply('c', ['x', 'y']) >> cf3 + + when: + cf3.complete(null) // null valued CFS are allowed + + then: + 1 * cfFactory.apply('d', ['x', 'y', null]) >> null // nulls are allowed as values + result.isDone() + result.get() == ['x', 'y', null, null] + } + def "eachSequentially propagates exception"() { given: def input = ['a', 'b', 'c'] @@ -109,6 +147,58 @@ class AsyncTest extends Specification { result.get() == ['x', 'y', 'z'] } + def "each works for mapping function with polymorphic values"() { + given: + def input = ['a', 'b', 'c'] + def cfFactory = Mock(Function) + cfFactory.apply('a') >> completedFuture('x') + cfFactory.apply('b') >> 'y' + cfFactory.apply('c') >> completedFuture('z') + + + when: + def result = Async.each(input, cfFactory) + + then: + result.isDone() + result.get() == ['x', 'y', 'z'] + } + + def "eachPolymorphic works for mapping function with polymorphic values"() { + given: + def input = ['a', 'b', 'c'] + def cfFactory = Mock(Function) + cfFactory.apply('a') >> completedFuture('x') + cfFactory.apply('b') >> 'y' + cfFactory.apply('c') >> completedFuture('z') + + + when: + def result = Async.eachPolymorphic(input, cfFactory) + + then: + result instanceof CompletableFuture + (result as CompletableFuture).isDone() + (result as CompletableFuture).get() == ['x', 'y', 'z'] + } + + def "eachPolymorphic works for mapping function with materialised values"() { + given: + def input = ['a', 'b', 'c'] + def cfFactory = Mock(Function) + cfFactory.apply('a') >> 'x' + cfFactory.apply('b') >> 'y' + cfFactory.apply('c') >> 'z' + + + when: + def result = Async.eachPolymorphic(input, cfFactory) + + then: + result instanceof List + result == ['x', 'y', 'z'] + } + def "each with mapping function propagates factory exception"() { given: def input = ['a', 'b', 'c'] @@ -130,4 +220,205 @@ class AsyncTest extends 
Specification { exception.getCause().getMessage() == "some error" } + + def "can wait on objects of cfs or both"() { + when: + def asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.add(completedFuture("0")) + asyncBuilder.add(completedFuture("1")) + asyncBuilder.addObject("2") + asyncBuilder.addObject("3") + asyncBuilder.add(completedFuture("4")) + + def list = asyncBuilder.await().join() + + then: + list == ["0", "1", "2", "3", "4"] + + when: + asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.add(completedFuture("0")) + asyncBuilder.add(completedFuture("1")) + asyncBuilder.add(completedFuture("2")) + asyncBuilder.add(completedFuture("3")) + asyncBuilder.add(completedFuture("4")) + + list = asyncBuilder.await().join() + + then: + list == ["0", "1", "2", "3", "4"] + + when: + asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.addObject("0") + asyncBuilder.addObject("1") + asyncBuilder.addObject("2") + asyncBuilder.addObject("3") + asyncBuilder.addObject("4") + + list = asyncBuilder.await().join() + + then: + list == ["0", "1", "2", "3", "4"] + + when: "it has a mix of CFs and objects" + asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.addObject("0") + asyncBuilder.addObject("1") + asyncBuilder.add(completedFuture("2")) + asyncBuilder.addObject("3") + asyncBuilder.addObject(completedFuture("4")) + + list = asyncBuilder.await().join() + + then: + list == ["0", "1", "2", "3", "4"] + } + + def "can wait on objects of cfs or both with empty or single values"() { + when: + def asyncBuilder = Async.ofExpectedSize(0) + def list = asyncBuilder.await().join() + + then: + list == [] + + when: + asyncBuilder = Async.ofExpectedSize(1) + asyncBuilder.add(completedFuture("A")) + list = asyncBuilder.await().join() + + then: + list == ["A"] + + when: + asyncBuilder = Async.ofExpectedSize(1) + asyncBuilder.addObject(completedFuture("A")) + list = asyncBuilder.await().join() + + then: + list == ["A"] + + when: + asyncBuilder = Async.ofExpectedSize(1) + 
asyncBuilder.addObject("A") + list = asyncBuilder.await().join() + + then: + list == ["A"] + } + + def "await polymorphic works as expected"() { + + when: + def asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.add(completedFuture("0")) + asyncBuilder.add(completedFuture("1")) + asyncBuilder.addObject("2") + asyncBuilder.addObject("3") + asyncBuilder.add(completedFuture("4")) + + def awaited = asyncBuilder.awaitPolymorphic() + + then: + awaited instanceof CompletableFuture + joinOrMaterialized(awaited) == ["0", "1", "2", "3", "4"] + + when: + asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.addObject(completedFuture("0")) + asyncBuilder.addObject(completedFuture("1")) + asyncBuilder.addObject(completedFuture("2")) + asyncBuilder.addObject(completedFuture("3")) + asyncBuilder.addObject(completedFuture("4")) + + awaited = asyncBuilder.awaitPolymorphic() + + then: + awaited instanceof CompletableFuture + joinOrMaterialized(awaited) == ["0", "1", "2", "3", "4"] + + when: + asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.addObject("0") + asyncBuilder.addObject("1") + asyncBuilder.addObject("2") + asyncBuilder.addObject("3") + asyncBuilder.addObject("4") + + awaited = asyncBuilder.awaitPolymorphic() + + then: + !(awaited instanceof CompletableFuture) + joinOrMaterialized(awaited) == ["0", "1", "2", "3", "4"] + + when: + asyncBuilder = Async.ofExpectedSize(0) + + awaited = asyncBuilder.awaitPolymorphic() + + then: + !(awaited instanceof CompletableFuture) + joinOrMaterialized(awaited) == [] + + when: + asyncBuilder = Async.ofExpectedSize(1) + asyncBuilder.addObject("A") + + awaited = asyncBuilder.awaitPolymorphic() + + then: + !(awaited instanceof CompletableFuture) + joinOrMaterialized(awaited) == ["A"] + + when: + asyncBuilder = Async.ofExpectedSize(1) + asyncBuilder.addObject(completedFuture("A")) + + awaited = asyncBuilder.awaitPolymorphic() + + then: + awaited instanceof CompletableFuture + joinOrMaterialized(awaited) == ["A"] + } + + def "await 
polymorphic works as expected with nulls"() { + + when: + def asyncBuilder = Async.ofExpectedSize(5) + asyncBuilder.add(completedFuture("0")) + asyncBuilder.add(completedFuture(null)) + asyncBuilder.addObject("2") + asyncBuilder.addObject(null) + asyncBuilder.add(completedFuture("4")) + + def awaited = asyncBuilder.awaitPolymorphic() + + then: + awaited instanceof CompletableFuture + joinOrMaterialized(awaited) == ["0", null, "2", null, "4"] + } + + def "toCompletableFutureOrMaterializedObject tested"() { + def x = "x" + def cf = completedFuture(x) + + when: + def object = Async.toCompletableFutureOrMaterializedObject(x) + then: + object == x + + when: + object = Async.toCompletableFutureOrMaterializedObject(cf) + then: + object == cf + } + + Object joinOrMaterialized(Object awaited) { + if (awaited instanceof CompletableFuture) { + return ((CompletableFuture) awaited).join() + } else { + return awaited + } + } } diff --git a/src/test/groovy/graphql/execution/BreadthFirstExecutionTestStrategy.java b/src/test/groovy/graphql/execution/BreadthFirstExecutionTestStrategy.java index 5a2bab34c1..167e58a3b9 100644 --- a/src/test/groovy/graphql/execution/BreadthFirstExecutionTestStrategy.java +++ b/src/test/groovy/graphql/execution/BreadthFirstExecutionTestStrategy.java @@ -58,12 +58,12 @@ private FetchedValue fetchField(ExecutionContext executionContext, ExecutionStra ExecutionStrategyParameters newParameters = parameters .transform(builder -> builder.field(currentField).path(fieldPath)); - return fetchField(executionContext, newParameters).join(); + return Async.toCompletableFuture(fetchField(executionContext, newParameters)).join(); } private void completeValue(ExecutionContext executionContext, Map results, String fieldName, FetchedValue fetchedValue, ExecutionStrategyParameters newParameters) { - ExecutionResult resolvedResult = completeField(executionContext, newParameters, fetchedValue).getFieldValue().join(); - results.put(fieldName, resolvedResult != null ? 
resolvedResult.getData() : null); + Object resolvedResult = completeField(executionContext, newParameters, fetchedValue).getFieldValueFuture().join(); + results.put(fieldName, resolvedResult); } } diff --git a/src/test/groovy/graphql/execution/BreadthFirstTestStrategy.java b/src/test/groovy/graphql/execution/BreadthFirstTestStrategy.java index b5f073e79e..653e035df1 100644 --- a/src/test/groovy/graphql/execution/BreadthFirstTestStrategy.java +++ b/src/test/groovy/graphql/execution/BreadthFirstTestStrategy.java @@ -41,7 +41,7 @@ private Map fetchFields(ExecutionContext executionContext, for (String fieldName : fields.keySet()) { ExecutionStrategyParameters newParameters = newParameters(parameters, fields, fieldName); - CompletableFuture fetchFuture = fetchField(executionContext, newParameters); + CompletableFuture fetchFuture = Async.toCompletableFuture(fetchField(executionContext, newParameters)); fetchFutures.put(fieldName, fetchFuture); } @@ -63,8 +63,8 @@ private CompletableFuture completeFields(ExecutionContext execu FetchedValue fetchedValue = fetchedValues.get(fieldName); try { - ExecutionResult resolvedResult = completeField(executionContext, newParameters, fetchedValue).getFieldValue().join(); - results.put(fieldName, resolvedResult != null ? 
resolvedResult.getData() : null); + Object resolvedResult = completeField(executionContext, newParameters, fetchedValue).getFieldValueFuture().join(); + results.put(fieldName, resolvedResult); } catch (NonNullableFieldWasNullException e) { assertNonNullFieldPrecondition(e); results = null; diff --git a/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy b/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy index 629f1fde98..3b75b05b0b 100644 --- a/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy +++ b/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy @@ -111,10 +111,18 @@ class ConditionalNodesTest extends Specification { type Query { in : String out : String + pet : Pet + } + + type Pet { + name: String + favouriteSnack: String } """ DataFetcher df = { DataFetchingEnvironment env -> env.getFieldDefinition().name } - def graphQL = TestUtil.graphQL(sdl, [Query: ["in": df, "out": df]]).build() + def graphQL = TestUtil.graphQL(sdl, [ + Query: ["in": df, "out": df, "pet": (DataFetcher) { [ : ] } ], + Pet: ["name": df, "favouriteSnack": df]]).build() ConditionalNodeDecision customDecision = new ConditionalNodeDecision() { @Override boolean shouldInclude(ConditionalNodeDecisionEnvironment env) { @@ -183,6 +191,25 @@ class ConditionalNodesTest extends Specification { then: er["data"] == ["in": "in", "out": "out"] + + // A test for fields below the top level + when: + ei = ExecutionInput.newExecutionInput() + .graphQLContext(contextMap) + .query(""" + query q { + in + pet { + name + favouriteSnack @featureFlag(flagName : "OFF") + } + } + """ + ).build() + er = graphQL.execute(ei) + + then: + er["data"] == ["in": "in", "pet": ["name": "name"]] } private ArrayList directive(String name, Argument argument) { diff --git a/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy b/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy index a7bc5b7fa7..35fbfe2f1d 100644 --- 
a/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy +++ b/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy @@ -7,7 +7,7 @@ import spock.lang.Specification class DataFetcherResultTest extends Specification { - def error1 = new ValidationError(ValidationErrorType.DuplicateOperationName) + def error1 = ValidationError.newValidationError().validationErrorType(ValidationErrorType.DuplicateOperationName).build() def error2 = new InvalidSyntaxError([], "Boo") def "basic building"() { diff --git a/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy b/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy index eb35f277b7..210c442989 100644 --- a/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy +++ b/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy @@ -54,7 +54,7 @@ class ExecutionContextBuilderTest extends Specification { executionContext.root == root executionContext.context == context // Retain deprecated method for test coverage executionContext.graphQLContext == graphQLContext - executionContext.variables == [var: 'value'] // Retain deprecated method for test coverage + executionContext.getCoercedVariables().toMap() == [var: 'value'] executionContext.getFragmentsByName() == [MyFragment: fragment] executionContext.operationDefinition == operation executionContext.dataLoaderRegistry == dataLoaderRegistry diff --git a/src/test/groovy/graphql/execution/ExecutionStrategyEquivalenceTest.groovy b/src/test/groovy/graphql/execution/ExecutionStrategyEquivalenceTest.groovy index 17d998699a..93e58e3aa6 100644 --- a/src/test/groovy/graphql/execution/ExecutionStrategyEquivalenceTest.groovy +++ b/src/test/groovy/graphql/execution/ExecutionStrategyEquivalenceTest.groovy @@ -6,8 +6,6 @@ import graphql.StarWarsSchema import spock.lang.Specification import spock.lang.Unroll -import java.util.concurrent.ForkJoinPool - /** * This allows the testing of different execution strategies that 
provide the same results given the same schema, * and queries and results @@ -27,8 +25,7 @@ class ExecutionStrategyEquivalenceTest extends Specification { name } } - """ - : [ + """: [ hero: [ name: 'R2-D2' ] @@ -46,8 +43,7 @@ class ExecutionStrategyEquivalenceTest extends Specification { } } } - """ - : [ + """: [ hero: [ id : '2001', name : 'R2-D2', @@ -73,8 +69,7 @@ class ExecutionStrategyEquivalenceTest extends Specification { name } } - """ - : [ + """: [ human: [ name: 'Luke Skywalker' ] @@ -93,8 +88,7 @@ class ExecutionStrategyEquivalenceTest extends Specification { name } } - """ - : [ + """: [ luke: [ name: 'Luke Skywalker' @@ -132,11 +126,11 @@ class ExecutionStrategyEquivalenceTest extends Specification { where: - strategyType | strategyUnderTest | expectedQueriesAndResults - "async" | new AsyncExecutionStrategy() | standardQueriesAndResults() - "asyncSerial" | new AsyncSerialExecutionStrategy() | standardQueriesAndResults() - "breadthFirst" | new BreadthFirstExecutionTestStrategy() | standardQueriesAndResults() - "breadthFirst" | new BreadthFirstTestStrategy() | standardQueriesAndResults() + strategyType | strategyUnderTest | expectedQueriesAndResults + "async" | new AsyncExecutionStrategy() | standardQueriesAndResults() + "asyncSerial" | new AsyncSerialExecutionStrategy() | standardQueriesAndResults() + "breadthFirst" | new BreadthFirstExecutionTestStrategy() | standardQueriesAndResults() + "breadthFirst" | new BreadthFirstTestStrategy() | standardQueriesAndResults() } diff --git a/src/test/groovy/graphql/execution/ExecutionStrategyErrorsTest.groovy b/src/test/groovy/graphql/execution/ExecutionStrategyErrorsTest.groovy index b2ffa9e3cf..55b6afb6cb 100644 --- a/src/test/groovy/graphql/execution/ExecutionStrategyErrorsTest.groovy +++ b/src/test/groovy/graphql/execution/ExecutionStrategyErrorsTest.groovy @@ -2,7 +2,6 @@ package graphql.execution import graphql.ExceptionWhileDataFetching import graphql.ExecutionInput -import graphql.ExecutionResult import 
graphql.GraphQL import graphql.SerializationError import graphql.TestUtil @@ -70,11 +69,11 @@ class ExecutionStrategyErrorsTest extends Specification { Instrumentation instrumentation = new SimplePerformantInstrumentation() { @Override - InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { if (parameters.field.name == "diceyListCallAbort") { throw new AbortExecutionException("No lists for you") } - return super.beginFieldListComplete(parameters, state) + return super.beginFieldListCompletion(parameters, state) } } def graphQL = GraphQL.newGraphQL(schema).instrumentation(instrumentation).build() @@ -108,7 +107,7 @@ class ExecutionStrategyErrorsTest extends Specification { def er = graphQL.execute(ei) then: - er.errors.size() == 6 + er.errors.size() == 7 er.errors[0] instanceof TypeMismatchError er.errors[0].path == ["notAList"] @@ -116,17 +115,19 @@ class ExecutionStrategyErrorsTest extends Specification { er.errors[1].path == ["notAFloat"] er.errors[2] instanceof ExceptionWhileDataFetching - er.errors[2].path ==["notAnProperObject", "diceyListCall", 0, "bang"] - ((ExceptionWhileDataFetching)er.errors[2]).exception.message == "dicey call" + er.errors[2].path == ["notAnProperObject", "diceyListCall", 0, "bang"] + ((ExceptionWhileDataFetching) er.errors[2]).exception.message == "dicey call" er.errors[3] instanceof ExceptionWhileDataFetching - er.errors[3].path ==["notAnProperObject", "diceyListCall", 0, "abort"] - ((ExceptionWhileDataFetching)er.errors[3]).exception.message == "abort abort" + er.errors[3].path == ["notAnProperObject", "diceyListCall", 0, "abort"] + ((ExceptionWhileDataFetching) er.errors[3]).exception.message == "abort abort" er.errors[4] instanceof NonNullableFieldWasNullError - er.errors[4].path ==["notAnProperObject", "diceyListCall", 0, 
"nonNull"] + er.errors[4].path == ["notAnProperObject", "diceyListCall", 0, "nonNull"] er.errors[5] instanceof NonNullableFieldWasNullError - er.errors[5].path ==["notAnProperObject", "diceyListCall", 1] // the entry itself was null in a non null list entry + er.errors[5].path == ["notAnProperObject", "diceyListCall", 1] // the entry itself was null in a non null list entry + + er.errors[6] instanceof AbortExecutionException } } diff --git a/src/test/groovy/graphql/execution/ExecutionStrategyExceptionHandlingEquivalenceTest.groovy b/src/test/groovy/graphql/execution/ExecutionStrategyExceptionHandlingEquivalenceTest.groovy index af179821cb..2c8653491e 100644 --- a/src/test/groovy/graphql/execution/ExecutionStrategyExceptionHandlingEquivalenceTest.groovy +++ b/src/test/groovy/graphql/execution/ExecutionStrategyExceptionHandlingEquivalenceTest.groovy @@ -18,7 +18,7 @@ class ExecutionStrategyExceptionHandlingEquivalenceTest extends Specification { @Override InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters, InstrumentationState state) { - throw new AbortExecutionException([new ValidationError(ValidationErrorType.UnknownType)]) // Retain as there is no alternative constructor for ValidationError + throw new AbortExecutionException([ValidationError.newValidationError().validationErrorType(ValidationErrorType.UnknownType).build()]) } } diff --git a/src/test/groovy/graphql/execution/ExecutionStrategyTest.groovy b/src/test/groovy/graphql/execution/ExecutionStrategyTest.groovy index 412ff3fc65..6a3d72ec07 100644 --- a/src/test/groovy/graphql/execution/ExecutionStrategyTest.groovy +++ b/src/test/groovy/graphql/execution/ExecutionStrategyTest.groovy @@ -139,7 +139,7 @@ class ExecutionStrategyTest extends Specification { executionStrategy.completeValue(executionContext, parameters) then: - 1 * executionContext.queryStrategy.execute(_, _) + 1 * executionContext.queryStrategy.executeObject(_, _) >> CompletableFuture.completedFuture(null) 0 * 
executionContext.mutationStrategy.execute(_, _) 0 * executionContext.subscriptionStrategy.execute(_, _) } @@ -165,10 +165,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == result + executionResult == result } def "completes value for java.util.Optional"() { @@ -186,10 +186,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == expected + executionResult == expected where: result || expected @@ -212,7 +212,7 @@ class ExecutionStrategyTest extends Specification { .build() when: - executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: def e = thrown(CompletionException) @@ -234,10 +234,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == expected + executionResult == expected where: result || expected @@ -260,7 +260,7 @@ class ExecutionStrategyTest extends Specification { .build() when: - executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: def e = thrown(CompletionException) @@ -282,10 +282,10 @@ class ExecutionStrategyTest extends Specification { 
.build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == expected + executionResult == expected where: result || expected @@ -308,7 +308,7 @@ class ExecutionStrategyTest extends Specification { .build() when: - executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: def e = thrown(CompletionException) @@ -330,10 +330,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == expected + executionResult == expected where: result || expected @@ -356,7 +356,7 @@ class ExecutionStrategyTest extends Specification { .build() when: - executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: def e = thrown(CompletionException) @@ -380,10 +380,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == result + executionResult == result } def "completing value with serializing throwing exception"() { @@ -402,10 +402,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = 
executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == null + executionResult == null executionContext.errors.size() == 1 executionContext.errors[0] instanceof SerializationError @@ -427,10 +427,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == null + executionResult == null executionContext.errors.size() == 1 executionContext.errors[0] instanceof SerializationError @@ -681,12 +681,13 @@ class ExecutionStrategyTest extends Specification { Map fetchedValues = [:] @Override - InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + @Override + InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { if (parameters.fetchedValue instanceof FetchedValue) { FetchedValue value = (FetchedValue) parameters.fetchedValue fetchedValues.put(parameters.field.name, value) } - return super.beginFieldComplete(parameters, state) + return super.beginFieldCompletion(parameters, state) } } ExecutionContext instrumentedExecutionContext = executionContextBuilder.instrumentation(instrumentation).build() @@ -786,10 +787,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.get().data == [1, 2, 3] + executionResult == [1, 2, 3] } def "#842 completes value for java.util.Stream"() { @@ -810,10 +811,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def 
executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.get().data == [1, 2, 3] + executionResult == [1, 2, 3] } def "#842 completes value for java.util.Iterator"() { @@ -834,10 +835,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.get().data == [1, 2, 3] + executionResult == [1, 2, 3] } @@ -940,10 +941,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.get().data == [1L, 2L, 3L] + executionResult == [1L, 2L, 3L] } def "when completeValue expects GraphQLList and non iterable or non array is passed then it should yield a TypeMismatch error"() { @@ -964,10 +965,10 @@ class ExecutionStrategyTest extends Specification { .build() when: - def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValue.join() + def executionResult = executionStrategy.completeValue(executionContext, parameters).fieldValueFuture.join() then: - executionResult.data == null + executionResult == null executionContext.errors.size() == 1 executionContext.errors[0] instanceof TypeMismatchError } diff --git a/src/test/groovy/graphql/execution/ExecutionTest.groovy b/src/test/groovy/graphql/execution/ExecutionTest.groovy index 0cc8f1d287..7130beca0f 100644 --- a/src/test/groovy/graphql/execution/ExecutionTest.groovy +++ b/src/test/groovy/graphql/execution/ExecutionTest.groovy @@ -8,7 +8,6 @@ import 
graphql.execution.instrumentation.InstrumentationState import graphql.execution.instrumentation.SimplePerformantInstrumentation import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters import graphql.parser.Parser -import org.jetbrains.annotations.NotNull import spock.lang.Specification import java.util.concurrent.CompletableFuture @@ -36,7 +35,7 @@ class ExecutionTest extends Specification { def subscriptionStrategy = new CountingExecutionStrategy() def mutationStrategy = new CountingExecutionStrategy() def queryStrategy = new CountingExecutionStrategy() - def execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, SimplePerformantInstrumentation.INSTANCE, ValueUnboxer.DEFAULT) + def execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, SimplePerformantInstrumentation.INSTANCE, ValueUnboxer.DEFAULT, false) def emptyExecutionInput = ExecutionInput.newExecutionInput().query("query").build() def instrumentationState = new InstrumentationState() {} @@ -114,18 +113,18 @@ class ExecutionTest extends Specification { def instrumentation = new SimplePerformantInstrumentation() { - @Override + @Override ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters, InstrumentationState state) { - - return ExecutionContextBuilder.newExecutionContextBuilder(executionContext) - .queryStrategy(queryStrategyUpdatedToDuringExecutionContextInstrument) - .build() - } - } - - def execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, ValueUnboxer.DEFAULT) + + return ExecutionContextBuilder.newExecutionContextBuilder(executionContext) + .queryStrategy(queryStrategyUpdatedToDuringExecutionContextInstrument) + .build() + } + } + + def execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, ValueUnboxer.DEFAULT, false) when: diff --git 
a/src/test/groovy/graphql/execution/FieldCollectorTest.groovy b/src/test/groovy/graphql/execution/FieldCollectorTest.groovy index 1fa8f360e8..6460512543 100644 --- a/src/test/groovy/graphql/execution/FieldCollectorTest.groovy +++ b/src/test/groovy/graphql/execution/FieldCollectorTest.groovy @@ -21,7 +21,7 @@ class FieldCollectorTest extends Specification { type Query { bar1: String bar2: String - } + } """) def objectType = schema.getType("Query") as GraphQLObjectType FieldCollector fieldCollector = new FieldCollector() @@ -48,12 +48,12 @@ class FieldCollectorTest extends Specification { type Query{ bar1: String bar2: Test - } + } interface Test { - fieldOnInterface: String - } + fieldOnInterface: String + } type TestImpl implements Test { - fieldOnInterface: String + fieldOnInterface: String } """) def object = schema.getType("TestImpl") as GraphQLObjectType @@ -73,6 +73,136 @@ class FieldCollectorTest extends Specification { then: result.getSubField('fieldOnInterface').getFields() == [interfaceField] + } + + def "collect fields that are merged together - one of the fields is on an inline fragment "() { + def schema = TestUtil.schema(""" + type Query { + echo: String + } +""") + + Document document = new Parser().parseDocument(""" + { + echo + ... 
on Query { + echo + } + } + +""") + + def object = schema.getType("TestImpl") as GraphQLObjectType + FieldCollector fieldCollector = new FieldCollector() + FieldCollectorParameters fieldCollectorParameters = newParameters() + .schema(schema) + .objectType(object) + .build() + + def selectionSet = ((OperationDefinition) document.children[0]).selectionSet + + when: + def result = fieldCollector.collectFields(fieldCollectorParameters, selectionSet) + + then: + result.size() == 1 + result.getSubField('echo').fields.size() == 1 + } + + def "collect fields that are merged together - fields have different selection sets "() { + def schema = TestUtil.schema(""" + type Query { + me: Me + } + + type Me { + firstname: String + lastname: String + } +""") + + Document document = new Parser().parseDocument(""" + { + me { + firstname + } + me { + lastname + } + } + +""") + + def object = schema.getType("TestImpl") as GraphQLObjectType + FieldCollector fieldCollector = new FieldCollector() + FieldCollectorParameters fieldCollectorParameters = newParameters() + .schema(schema) + .objectType(object) + .build() + + def selectionSet = ((OperationDefinition) document.children[0]).selectionSet + + when: + def result = fieldCollector.collectFields(fieldCollectorParameters, selectionSet) + + then: + result.size() == 1 + + def meField = result.getSubField('me') + + meField.fields.size() == 2 + + meField.fields[0].selectionSet.selections.size() == 1 + meField.fields[0].selectionSet.selections[0].name == "firstname" + + meField.fields[1].selectionSet.selections.size() == 1 + meField.fields[1].selectionSet.selections[0].name == "lastname" + } + + def "collect fields that are merged together - fields have different directives"() { + def schema = TestUtil.schema(""" + directive @one on FIELD + directive @two on FIELD + + type Query { + echo: String + } +""") + + Document document = new Parser().parseDocument(""" + { + echo @one + echo @two + } + +""") + + def object = schema.getType("TestImpl") 
as GraphQLObjectType + FieldCollector fieldCollector = new FieldCollector() + FieldCollectorParameters fieldCollectorParameters = newParameters() + .schema(schema) + .objectType(object) + .build() + + def selectionSet = ((OperationDefinition) document.children[0]).selectionSet + + when: + def result = fieldCollector.collectFields(fieldCollectorParameters, selectionSet) + + then: + result.size() == 1 + + def echoField = result.getSubField('echo') + + echoField.fields.size() == 2 + + echoField.fields[0].name == "echo" + echoField.fields[0].directives.size() == 1 + echoField.fields[0].directives[0].name == "one" + + echoField.fields[1].name == "echo" + echoField.fields[1].directives.size() == 1 + echoField.fields[1].directives[0].name == "two" } } diff --git a/src/test/groovy/graphql/execution/FieldValueInfoTest.groovy b/src/test/groovy/graphql/execution/FieldValueInfoTest.groovy index 0fe4c30cec..5720d28444 100644 --- a/src/test/groovy/graphql/execution/FieldValueInfoTest.groovy +++ b/src/test/groovy/graphql/execution/FieldValueInfoTest.groovy @@ -3,26 +3,26 @@ package graphql.execution import graphql.AssertException import spock.lang.Specification +import java.util.concurrent.CompletableFuture -class FieldValueInfoTest extends Specification{ +import static graphql.execution.FieldValueInfo.CompleteValueType.SCALAR + + +class FieldValueInfoTest extends Specification { def "simple constructor test"() { when: - def fieldValueInfo = FieldValueInfo.newFieldValueInfo().build() + def fieldValueInfo = new FieldValueInfo(SCALAR, CompletableFuture.completedFuture("A")) then: "fieldValueInfos to be empty list" fieldValueInfo.fieldValueInfos == [] as List - - and: "other fields to be null " - fieldValueInfo.fieldValue == null - fieldValueInfo.completeValueType == null + fieldValueInfo.fieldValueFuture.join() == "A" + fieldValueInfo.completeValueType == SCALAR } def "negative constructor test"() { when: - FieldValueInfo.newFieldValueInfo() - .fieldValueInfos(null) - .build() + 
new FieldValueInfo(SCALAR, CompletableFuture.completedFuture("A"), null) then: def assEx = thrown(AssertException) assEx.message.contains("fieldValueInfos") diff --git a/src/test/groovy/graphql/execution/MaterialisedAndPromisedObjectsTest.groovy b/src/test/groovy/graphql/execution/MaterialisedAndPromisedObjectsTest.groovy new file mode 100644 index 0000000000..578e3b3ea9 --- /dev/null +++ b/src/test/groovy/graphql/execution/MaterialisedAndPromisedObjectsTest.groovy @@ -0,0 +1,97 @@ +package graphql.execution + + +import graphql.ExecutionResult +import graphql.GraphQL +import graphql.TestUtil +import graphql.execution.instrumentation.Instrumentation +import graphql.execution.instrumentation.InstrumentationState +import graphql.execution.instrumentation.SimplePerformantInstrumentation +import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters +import graphql.schema.DataFetcher +import graphql.schema.DataFetchingEnvironment +import spock.lang.Specification + +import java.util.concurrent.CompletableFuture + +import static graphql.ExecutionInput.newExecutionInput + +class MaterialisedAndPromisedObjectsTest extends Specification { + + def sdl = """ + type Query { + foo : Foo + } + + type Foo { + bar : Bar + name : String + } + + type Bar { + foo : Foo + name : String + } + """ + + def "make sure it can fetch both materialised and promised values"() { + + def cfPromisesOnFieldRegex = ~"neverMatchesAlwaysMaterialised" + Instrumentation fetchSwitcher = new SimplePerformantInstrumentation() { + @Override + DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) { + return new DataFetcher() { + @Override + Object get(DataFetchingEnvironment env) throws Exception { + def fieldName = env.getField().name + def fetchValue = dataFetcher.get(env) + // if it matches the regex - we send back an async promise value + if (fieldName =~ cfPromisesOnFieldRegex) { + return 
CompletableFuture.supplyAsync { -> fetchValue } + } + // just the materialised value! + return fetchValue + } + } + } + } + + GraphQL graphQL = TestUtil.graphQL(sdl).instrumentation(fetchSwitcher).build() + + + def source = [foo: [bar: [foo: [name: "stop"]]]] + def expectedData = [foo: [bar: [foo: [name: "stop"]]]] + + def query = """ { foo { bar { foo { name }}}} """ + + + when: "always materialised - no promises" + + cfPromisesOnFieldRegex = ~"neverMatchesAlwaysMaterialised" + ExecutionResult er = graphQL.execute(newExecutionInput(query).root(source)) + + + then: + er.errors.isEmpty() + er.data == expectedData + + when: "everything is promises" + + cfPromisesOnFieldRegex = ~".*" + er = graphQL.execute(newExecutionInput(query).root(source)) + + then: + er.errors.isEmpty() + er.data == expectedData + + + when: "only foo fields are CF promises so a mix of materialised and promised values" + + cfPromisesOnFieldRegex = ~"foo" + er = graphQL.execute(newExecutionInput(query).root(source)) + + then: + er.errors.isEmpty() + er.data == expectedData + } +} diff --git a/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy b/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy index 4a8031a805..c6e0f5a52b 100644 --- a/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy +++ b/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy @@ -8,6 +8,7 @@ import graphql.GraphQLError import graphql.GraphqlErrorBuilder import graphql.TestUtil import graphql.TypeMismatchError +import graphql.execution.instrumentation.InstrumentationState import graphql.execution.instrumentation.LegacyTestingInstrumentation import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters import graphql.execution.pubsub.CapturingSubscriber @@ -579,7 +580,7 @@ class SubscriptionExecutionStrategyTest extends Specification { def instrumentResultCalls = [] LegacyTestingInstrumentation instrumentation = 
new LegacyTestingInstrumentation() { @Override - CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) { + CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { instrumentResultCalls.add("instrumentExecutionResult") return CompletableFuture.completedFuture(executionResult) } diff --git a/src/test/groovy/graphql/execution/ValuesResolverTest.groovy b/src/test/groovy/graphql/execution/ValuesResolverTest.groovy index 0e53791a3b..1a7aa22b3e 100644 --- a/src/test/groovy/graphql/execution/ValuesResolverTest.groovy +++ b/src/test/groovy/graphql/execution/ValuesResolverTest.groovy @@ -23,7 +23,6 @@ import graphql.language.Value import graphql.language.VariableDefinition import graphql.language.VariableReference import graphql.schema.CoercingParseValueException -import graphql.schema.GraphQLNonNull import spock.lang.Specification import spock.lang.Unroll @@ -373,7 +372,7 @@ class ValuesResolverTest extends Specification { e.message == "Exactly one key must be specified for OneOf type 'oneOfInputObject'." 
when: "input type is wrapped in non-null" - def nonNullInputObjectType = GraphQLNonNull.nonNull(inputObjectType) + def nonNullInputObjectType = nonNull(inputObjectType) def fieldArgumentNonNull = newArgument().name("arg").type(nonNullInputObjectType).build() ValuesResolver.getArgumentValues([fieldArgumentNonNull], [argument], variables, graphQLContext, locale) @@ -427,6 +426,144 @@ class ValuesResolverTest extends Specification { | CoercedVariables.of(["var": [:]]) } + def "getArgumentValues: invalid oneOf nested input because of duplicate keys - #testCase"() { + given: "schema defining input object" + def oneOfObjectType = newInputObject() + .name("OneOfInputObject") + .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective()) + .field(newInputObjectField() + .name("a") + .type(GraphQLString) + .build()) + .field(newInputObjectField() + .name("b") + .type(GraphQLInt) + .build()) + .build() + + def parentObjectType = newInputObject() + .name("ParentInputObject") + .field(newInputObjectField() + .name("oneOfField") + .type(oneOfObjectType) + .build()) + .build() + + def argument = new Argument("arg", inputValue) + + when: + def fieldArgument = newArgument().name("arg").type(parentObjectType).build() + ValuesResolver.getArgumentValues([fieldArgument], [argument], variables, graphQLContext, locale) + + then: + def e = thrown(OneOfTooManyKeysException) + e.message == "Exactly one key must be specified for OneOf type 'OneOfInputObject'." 
+ + where: + testCase | inputValue | variables + '{oneOfField: {a: "abc", b: 123} } {}' | buildObjectLiteral([ + oneOfField: [ + a: StringValue.of("abc"), + b: IntValue.of(123) + ] + ]) | CoercedVariables.emptyVariables() + '{oneOfField: {a: null, b: 123 }} {}' | buildObjectLiteral([ + oneOfField: [ + a: NullValue.of(), + b: IntValue.of(123) + ] + ]) | CoercedVariables.emptyVariables() + + '{oneOfField: {a: $var, b: 123 }} { var: null }' | buildObjectLiteral([ + oneOfField: [ + a: VariableReference.of("var"), + b: IntValue.of(123) + ] + ]) | CoercedVariables.of(["var": null]) + + '{oneOfField: {a: $var, b: 123 }} {}' | buildObjectLiteral([ + oneOfField: [ + a: VariableReference.of("var"), + b: IntValue.of(123) + ] + ]) | CoercedVariables.emptyVariables() + + '{oneOfField: {a : "abc", b : null}} {}' | buildObjectLiteral([ + oneOfField: [ + a: StringValue.of("abc"), + b: NullValue.of() + ] + ]) | CoercedVariables.emptyVariables() + + '{oneOfField: {a : null, b : null}} {}' | buildObjectLiteral([ + oneOfField: [ + a: NullValue.of(), + b: NullValue.of() + ] + ]) | CoercedVariables.emptyVariables() + + '{oneOfField: {a : $a, b : $b}} {a : "abc"}' | buildObjectLiteral([ + oneOfField: [ + a: VariableReference.of("a"), + b: VariableReference.of("v") + ] + ]) | CoercedVariables.of(["a": "abc"]) + '$var {var : {oneOfField: { a : "abc", b : 123}}}' | VariableReference.of("var") + | CoercedVariables.of(["var": ["oneOfField": ["a": "abc", "b": 123]]]) + + '$var {var : {oneOfField: {} }}' | VariableReference.of("var") + | CoercedVariables.of(["var": ["oneOfField": [:]]]) + + } + + def "getArgumentValues: invalid oneOf nested input because of null value - #testCase"() { + given: "schema defining input object" + def oneOfObjectType = newInputObject() + .name("OneOfInputObject") + .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective()) + .field(newInputObjectField() + .name("a") + .type(GraphQLString) + .build()) + .field(newInputObjectField() + .name("b") + 
.type(GraphQLInt) + .build()) + .build() + + def parentObjectType = newInputObject() + .name("ParentInputObject") + .field(newInputObjectField() + .name("oneOfField") + .type(oneOfObjectType) + .build()) + .build() + + + def fieldArgument = newArgument().name("arg").type(parentObjectType).build() + + when: + def argument = new Argument("arg", inputValue) + ValuesResolver.getArgumentValues([fieldArgument], [argument], variables, graphQLContext, locale) + + then: + def e = thrown(OneOfNullValueException) + e.message == "OneOf type field 'OneOfInputObject.a' must be non-null." + + where: + // from https://github.com/graphql/graphql-spec/pull/825/files#diff-30a69c5a5eded8e1aea52e53dad1181e6ec8f549ca2c50570b035153e2de1c43R1692 + testCase | inputValue | variables + + '`{ oneOfField: { a: null }}` {}' | buildObjectLiteral([ + oneOfField: [a: NullValue.of()] + ]) | CoercedVariables.emptyVariables() + + '`{ oneOfField: { a: $var }}` { var : null}' | buildObjectLiteral([ + oneOfField: [a: VariableReference.of("var")] + ]) | CoercedVariables.of(["var": null]) + + } + def "getArgumentValues: invalid oneOf input because of null value - #testCase"() { given: "schema defining input object" def inputObjectType = newInputObject() @@ -463,6 +600,246 @@ class ValuesResolverTest extends Specification { a: VariableReference.of("var") ]) | CoercedVariables.of(["var": null]) + '`{ a: $var }` { }' | buildObjectLiteral([ + a: VariableReference.of("var") + ]) | CoercedVariables.emptyVariables() + } + + def "getArgumentValues: invalid oneOf list input because element contains duplicate key - #testCase"() { + given: "schema defining input object" + def inputObjectType = newInputObject() + .name("oneOfInputObject") + .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective()) + .field(newInputObjectField() + .name("a") + .type(GraphQLString) + .build()) + .field(newInputObjectField() + .name("b") + .type(GraphQLInt) + .build()) + .build() + + when: + def argument = new 
Argument("arg", inputArray) + def fieldArgumentList = newArgument().name("arg").type(list(inputObjectType)).build() + ValuesResolver.getArgumentValues([fieldArgumentList], [argument], variables, graphQLContext, locale) + + then: + def e = thrown(OneOfTooManyKeysException) + e.message == "Exactly one key must be specified for OneOf type 'oneOfInputObject'." + + where: + + testCase | inputArray | variables + + '[{ a: "abc", b: 123 }]' + | ArrayValue.newArrayValue() + .value(buildObjectLiteral([ + a: StringValue.of("abc"), + b: IntValue.of(123) + ])).build() + | CoercedVariables.emptyVariables() + + '[{ a: "abc" }, { a: "xyz", b: 789 }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + buildObjectLiteral([ + a: StringValue.of("xyz"), + b: IntValue.of(789) + ]), + ]).build() + | CoercedVariables.emptyVariables() + + '[{ a: "abc" }, $var ] [{ a: "abc" }, { a: "xyz", b: 789 }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + VariableReference.of("var") + ]).build() + | CoercedVariables.of("var": [a: "xyz", b: 789]) + + } + + def "getArgumentValues: invalid oneOf list input because element contains null value - #testCase"() { + given: "schema defining input object" + def inputObjectType = newInputObject() + .name("oneOfInputObject") + .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective()) + .field(newInputObjectField() + .name("a") + .type(GraphQLString) + .build()) + .field(newInputObjectField() + .name("b") + .type(GraphQLInt) + .build()) + .build() + + when: + def argument = new Argument("arg", inputArray) + def fieldArgumentList = newArgument().name("arg").type(list(inputObjectType)).build() + ValuesResolver.getArgumentValues([fieldArgumentList], [argument], variables, graphQLContext, locale) + + then: + def e = thrown(OneOfNullValueException) + e.message == "OneOf type field 'oneOfInputObject.a' must be non-null." 
+ + where: + + testCase | inputArray | variables + + '[{ a: "abc" }, { a: null }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + buildObjectLiteral([ + a: NullValue.of() + ]), + ]).build() + | CoercedVariables.emptyVariables() + + '[{ a: "abc" }, { a: $var }] [{ a: "abc" }, { a: null }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + buildObjectLiteral([ + a: VariableReference.of("var") + ]), + ]).build() + | CoercedVariables.of("var": null) + + } + + def "getArgumentValues: invalid oneOf non-null list input because element contains duplicate key - #testCase"() { + given: "schema defining input object" + def inputObjectType = newInputObject() + .name("oneOfInputObject") + .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective()) + .field(newInputObjectField() + .name("a") + .type(GraphQLString) + .build()) + .field(newInputObjectField() + .name("b") + .type(GraphQLInt) + .build()) + .build() + + when: + def argument = new Argument("arg", inputArray) + def fieldArgumentList = newArgument().name("arg").type(nonNull(list(inputObjectType))).build() + ValuesResolver.getArgumentValues([fieldArgumentList], [argument], variables, graphQLContext, locale) + + then: + def e = thrown(OneOfTooManyKeysException) + e.message == "Exactly one key must be specified for OneOf type 'oneOfInputObject'." 
+ + where: + + testCase | inputArray | variables + + '[{ a: "abc", b: 123 }]' + | ArrayValue.newArrayValue() + .value(buildObjectLiteral([ + a: StringValue.of("abc"), + b: IntValue.of(123) + ])).build() + | CoercedVariables.emptyVariables() + + '[{ a: "abc" }, { a: "xyz", b: 789 }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + buildObjectLiteral([ + a: StringValue.of("xyz"), + b: IntValue.of(789) + ]), + ]).build() + | CoercedVariables.emptyVariables() + + '[{ a: "abc" }, $var ] [{ a: "abc" }, { a: "xyz", b: 789 }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + VariableReference.of("var") + ]).build() + | CoercedVariables.of("var": [a: "xyz", b: 789]) + + } + + def "getArgumentValues: invalid oneOf list input with non-nullable elements, because element contains duplicate key - #testCase"() { + given: "schema defining input object" + def inputObjectType = newInputObject() + .name("oneOfInputObject") + .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective()) + .field(newInputObjectField() + .name("a") + .type(GraphQLString) + .build()) + .field(newInputObjectField() + .name("b") + .type(GraphQLInt) + .build()) + .build() + + when: + def argument = new Argument("arg", inputArray) + def fieldArgumentList = newArgument().name("arg").type(list(nonNull(inputObjectType))).build() + ValuesResolver.getArgumentValues([fieldArgumentList], [argument], variables, graphQLContext, locale) + + then: + def e = thrown(OneOfTooManyKeysException) + e.message == "Exactly one key must be specified for OneOf type 'oneOfInputObject'." 
+ + where: + + testCase | inputArray | variables + + '[{ a: "abc", b: 123 }]' + | ArrayValue.newArrayValue() + .value(buildObjectLiteral([ + a: StringValue.of("abc"), + b: IntValue.of(123) + ])).build() + | CoercedVariables.emptyVariables() + + '[{ a: "abc" }, { a: "xyz", b: 789 }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + buildObjectLiteral([ + a: StringValue.of("xyz"), + b: IntValue.of(789) + ]), + ]).build() + | CoercedVariables.emptyVariables() + + '[{ a: "abc" }, $var ] [{ a: "abc" }, { a: "xyz", b: 789 }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: StringValue.of("abc") + ]), + VariableReference.of("var") + ]).build() + | CoercedVariables.of("var": [a: "xyz", b: 789]) + } def "getArgumentValues: valid oneOf input - #testCase"() { @@ -513,6 +890,54 @@ class ValuesResolverTest extends Specification { } + def "getArgumentValues: valid oneOf list input - #testCase"() { + given: "schema defining input object" + def inputObjectType = newInputObject() + .name("oneOfInputObject") + .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective()) + .field(newInputObjectField() + .name("a") + .type(GraphQLString) + .build()) + .field(newInputObjectField() + .name("b") + .type(GraphQLInt) + .build()) + .build() + + when: + def argument = new Argument("arg", inputArray) + def fieldArgumentList = newArgument().name("arg").type(list(inputObjectType)).build() + def values = ValuesResolver.getArgumentValues([fieldArgumentList], [argument], variables, graphQLContext, locale) + + then: + values == expectedValues + + where: + + testCase | inputArray | variables | expectedValues + + '[{ a: "abc"}]' + | ArrayValue.newArrayValue() + .value(buildObjectLiteral([ + a: StringValue.of("abc"), + ])).build() + | CoercedVariables.emptyVariables() + | [arg: [[a: "abc"]]] + + '[{ a: "abc" }, $var ] [{ a: "abc" }, { b: 789 }]' + | ArrayValue.newArrayValue() + .values([ + buildObjectLiteral([ + a: 
StringValue.of("abc") + ]), + VariableReference.of("var") + ]).build() + | CoercedVariables.of("var": [b: 789]) + | [arg: [[a: "abc"], [b: 789]]] + + } + def "getArgumentValues: invalid oneOf input no values where passed - #testCase"() { given: "schema defining input object" def inputObjectType = newInputObject() @@ -847,7 +1272,7 @@ class ValuesResolverTest extends Specification { executionResult.data == null executionResult.errors.size() == 1 executionResult.errors[0].errorType == ErrorType.ValidationError - executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a value that can be converted to type 'Boolean' but it was a 'String'" + executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a Boolean input, but it was a 'String'" executionResult.errors[0].locations == [new SourceLocation(2, 35)] } @@ -885,7 +1310,7 @@ class ValuesResolverTest extends Specification { executionResult.data == null executionResult.errors.size() == 1 executionResult.errors[0].errorType == ErrorType.ValidationError - executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a value that can be converted to type 'Float' but it was a 'String'" + executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a Number input, but it was a 'String'" executionResult.errors[0].locations == [new SourceLocation(2, 35)] } } diff --git a/src/test/groovy/graphql/execution/incremental/DeferExecutionSupportIntegrationTest.groovy b/src/test/groovy/graphql/execution/incremental/DeferExecutionSupportIntegrationTest.groovy new file mode 100644 index 0000000000..e7844cc750 --- /dev/null +++ b/src/test/groovy/graphql/execution/incremental/DeferExecutionSupportIntegrationTest.groovy @@ -0,0 +1,1517 @@ +package graphql.execution.incremental + +import com.google.common.collect.Iterables +import graphql.Directives +import graphql.ExecutionInput +import graphql.ExecutionResult +import 
graphql.ExperimentalApi +import graphql.GraphQL +import graphql.TestUtil +import graphql.execution.pubsub.CapturingSubscriber +import graphql.incremental.DelayedIncrementalPartialResult +import graphql.incremental.IncrementalExecutionResult +import graphql.incremental.IncrementalExecutionResultImpl +import graphql.schema.DataFetcher +import graphql.schema.DataFetchingEnvironment +import graphql.schema.TypeResolver +import graphql.schema.idl.RuntimeWiring +import org.awaitility.Awaitility +import org.reactivestreams.Publisher +import spock.lang.Specification +import spock.lang.Unroll + +import java.util.concurrent.CompletableFuture + +import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring + +class DeferExecutionSupportIntegrationTest extends Specification { + def schemaSpec = ''' + type Query { + post : Post + posts: [Post] + postById(id: ID!): Post + hello: String + item(type: String!): Item + } + + interface Item { + id: ID! + summary: String + text: String + } + + type Page implements Item { + id: ID! + summary: String + text: String + views: Int + } + + type Post implements Item { + id: ID! + summary : String + text : String + latestComment: Comment + comments: [Comment] + resolvesToNull: String + dataFetcherError: String + coercionError: Int + typeMismatchError: [String] + nonNullableError: String! 
+ wordCount: Int + } + + type Comment { + title: String + content: String + author: Person + } + + type Person { + name: String + avatar: String + } + + type Mutation { + addPost: Post + } + ''' + + GraphQL graphQL = null + + private static DataFetcher resolve(Object value) { + return resolve(value, 0, false) + } + + private static DataFetcher resolve(Object value, Integer sleepMs) { + return resolve(value, sleepMs, false) + } + + private static DataFetcher resolve(Object value, Integer sleepMs, boolean allowMultipleCalls) { + return new DataFetcher() { + boolean executed = false + @Override + Object get(DataFetchingEnvironment environment) throws Exception { + if(executed && !allowMultipleCalls) { + throw new IllegalStateException("This data fetcher can run only once") + } + executed = true + return CompletableFuture.supplyAsync { + Thread.sleep(sleepMs) + return value + } + } + } + } + + private static DataFetcher resolveItem() { + return (env) -> { + def type = env.getArgument("type") + + return CompletableFuture.supplyAsync { [__typename: type, id: "1001"] } + } + } + + private static TypeResolver itemTypeResolver() { + return (env) -> { + env.getSchema().getObjectType(env.object["__typename"]) + } + } + + private static DataFetcher resolveWithException() { + return new DataFetcher() { + @Override + Object get(DataFetchingEnvironment environment) throws Exception { + throw new RuntimeException("Bang!!!") + } + } + } + + void setup() { + def runtimeWiring = RuntimeWiring.newRuntimeWiring() + .type(newTypeWiring("Query") + .dataFetcher("post", resolve([id: "1001"])) + .dataFetcher("posts", resolve([ + [id: "1001"], + [id: "1002"], + [id: "1003"] + ])) + .dataFetcher("postById", (env) -> { + return [id: env.getArgument("id")] + }) + .dataFetcher("hello", resolve("world")) + .dataFetcher("item", resolveItem()) + ) + .type(newTypeWiring("Post").dataFetcher("summary", resolve("A summary", 10))) + .type(newTypeWiring("Post").dataFetcher("text", resolve("The full 
text", 100))) + .type(newTypeWiring("Post").dataFetcher("wordCount", resolve(45999, 10, true))) + .type(newTypeWiring("Post").dataFetcher("latestComment", resolve([title: "Comment title"], 10))) + .type(newTypeWiring("Post").dataFetcher("dataFetcherError", resolveWithException())) + .type(newTypeWiring("Post").dataFetcher("coercionError", resolve("Not a number", 10))) + .type(newTypeWiring("Post").dataFetcher("typeMismatchError", resolve([a: "A Map instead of a List"], 10))) + .type(newTypeWiring("Post").dataFetcher("nonNullableError", resolve(null))) + .type(newTypeWiring("Page").dataFetcher("summary", resolve("A page summary", 10))) + .type(newTypeWiring("Page").dataFetcher("text", resolve("The page full text", 100))) + .type(newTypeWiring("Comment").dataFetcher("content", resolve("Full content", 100))) + .type(newTypeWiring("Comment").dataFetcher("author", resolve([name: "Author name"], 10))) + .type(newTypeWiring("Person").dataFetcher("avatar", resolve("Avatar image", 100))) + .type(newTypeWiring("Mutation") + .dataFetcher("addPost", resolve([id: "1001"])) + ) + .type(newTypeWiring("Item") + .typeResolver(itemTypeResolver())) + .build() + + def schema = TestUtil.schema(schemaSpec, runtimeWiring) + .transform({ builder -> builder.additionalDirective(Directives.DeferDirective) }) + this.graphQL = GraphQL.newGraphQL(schema).build() + } + + def "simple defer"() { + def query = ''' + query { + post { + id + ... @defer { + summary + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary"] + ] + ] + ] + ] + } + + def "defer with aliased fields"() { + def query = ''' + query { + postAlias: post { + idAlias: id + ... 
@defer { + summaryAlias: summary + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [postAlias: [idAlias: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["postAlias"], + data: [summaryAlias: "A summary"] + ] + ] + ] + ] + } + + def "aliased fields with different parameters"() { + def query = ''' + query { + postById(id: "1") { + id + } + ... @defer { + post2: postById(id: "2") { + id2: id + } + } + ... @defer(label: "defer-post3") { + post3: postById(id: "3") { + ... @defer(label: "defer-id3") { + id3: id + } + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [postById: [id: "1"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + // Ordering is non-deterministic, so we assert on the things we know are going to be true. 
+ + incrementalResults.size() == 3 + // only the last payload has "hasNext=true" + incrementalResults[0].hasNext == true + incrementalResults[1].hasNext == true + incrementalResults[2].hasNext == false + + // every payload has only 1 incremental item + incrementalResults.every { it.incremental.size() == 1 } + + incrementalResults.any { + it.incremental[0] == [path: [], data: [post2: [id2: "2"]]] + } + + // id3 HAS TO be delivered after post3 + def indexOfPost3 = Iterables.indexOf(incrementalResults, { + it.incremental[0] == [path: [], label: "defer-post3", data: [post3: [:]]] + }) + + def indexOfId3 = Iterables.indexOf(incrementalResults, { + it.incremental[0] == [path: ["post3"], label:"defer-id3", data: [id3: "3"]] + }) + + // Assert that both post3 and id3 are present + indexOfPost3 >= 0 + indexOfId3 >= 0 + // Assert that id3 is delivered after post3 + indexOfId3 > indexOfPost3 + } + + def "defer on interface field"() { + def query = """ + query { + item(type: "$type") { + __typename + id + ... on Item @defer { + summary + } + + ... on Post { + text + } + + ... 
on Page @defer { + text + } + } + } + """ + + when: + def initialResult = executeQuery(query) + + then: + if (type == "Post") { + assert initialResult.toSpecification() == [ + data : [item: [__typename: "Post", id: "1001", text: "The full text"]], + hasNext: true + ] + } else { + assert initialResult.toSpecification() == [ + data : [item: [__typename: "Page", id: "1001"]], + hasNext: true + ] + } + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + if(type == "Post") { + assert incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["item"], + data: [summary: "A summary"] + ] + ] + ] + ] + } else { + assert incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path: ["item"], + data: [summary: "A page summary"] + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path: ["item"], + data: [text: "The page full text"] + ] + ] + ] + ] + } + + where: + type << ["Page", "Post"] + } + + def "defer execution is ignored if support for incremental delivery is disabled"() { + def query = ''' + query { + post { + id + ... @defer { + summary + } + } + } + ''' + + when: + ExecutionResult initialResult = executeQuery(query, false, [:]) + + then: + !(initialResult instanceof IncrementalExecutionResult) + + initialResult.toSpecification() == [ + data: [post: [id: "1001", summary: "A summary"]], + ] + + } + + def "simple defer with label"() { + def query = ''' + query { + post { + id + ... 
@defer(label: "summary-defer") { + summary + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + label: "summary-defer", + data : [summary: "A summary"] + ] + ] + ] + ] + } + + def "defer with null label should behave as if no label was provided"() { + def query = ''' + query { + post { + id + ... @defer(label: null) { + summary + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + data : [summary: "A summary"] + ] + ] + ] + ] + } + + def "deferred field results in 'null'"() { + def query = ''' + query { + post { + id + ... @defer { + resolvesToNull + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + data : [resolvesToNull: null] + ] + ] + ] + ] + } + + def "simple defer with fragment definition"() { + def query = ''' + query { + post { + id + ... 
PostData @defer + } + } + + fragment PostData on Post { + summary + text + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + data : [summary: "A summary", text: "The full text"] + ] + ] + ] + ] + } + + @Unroll + def "defer with 'if: #ifValue' "() { + def query = """ + query { + post { + id + ... @defer(if: $ifValue) { + summary + } + } + } + """ + + when: + ExecutionResult executionResult = executeQuery(query) + + then: + if (ifValue) { + assert executionResult instanceof IncrementalExecutionResultImpl + + assert executionResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + def incrementalResults = getIncrementalResults(executionResult) + + assert incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary"] + ] + ] + ] + ] + } else { + assert !(executionResult instanceof IncrementalExecutionResult) + + assert executionResult.toSpecification() == [ + data: [post: [id: "1001", summary: "A summary"]], + ] + } + + where: + ifValue << [true, false] + } + + @Unroll + def "defer with 'if: #ifValue' passed as variable "() { + def query = """ + query(\$ifVar: Boolean!) { + post { + id + ... 
@defer(if: \$ifVar) { + summary + } + } + } + """ + + when: + ExecutionResult executionResult = executeQuery(query, [ifVar: ifValue]) + + then: + if (ifValue) { + assert executionResult instanceof IncrementalExecutionResultImpl + + assert executionResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + def incrementalResults = getIncrementalResults(executionResult) + + assert incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary"] + ] + ] + ] + ] + } else { + assert !(executionResult instanceof IncrementalExecutionResult) + + assert executionResult.toSpecification() == [ + data: [post: [id: "1001", summary: "A summary"]], + ] + } + + where: + ifValue << [true, false] + } + + def "2 fields deferred together"() { + def query = ''' + query { + post { + id + ... @defer { + summary + text + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary", text: "The full text"] + ] + ] + ] + ] + } + + def "2 fields deferred independently"() { + def query = ''' + query { + post { + id + ... @defer(label: "summary-defer") { + summary + } + ... 
@defer(label: "text-defer") { + text + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + label: "summary-defer", + path: ["post"], + data: [summary: "A summary"] + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + label: "text-defer", + path: ["post"], + data: [text: "The full text"] + ] + ] + ] + ] + } + + def "order of @defer definition in query doesn't affect order of incremental payloads in response"() { + def query = ''' + query { + post { + id + # "text" is defined before "summary" in the query, but it's slower, so it will be delivered after. + ... @defer { + text + } + ... @defer { + summary + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary"] + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [text: "The full text"] + ] + ] + ] + ] + } + + def "keeps the fields named correctly when interspersed in the query"() { + def query = ''' + query { + post { + firstId: id + ... @defer { + text + } + secondId: id + ... 
@defer { + summary + } + thirdId: id + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [firstId: "1001", secondId: "1001", thirdId: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary"] + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [text: "The full text"] + ] + ] + ] + ] + } + + def "defer result in initial result being empty object"() { + def query = ''' + query { + post { + ... @defer { + summary + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [:]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary"] + ] + ] + ] + ] + } + + def "defer on top level field"() { + def query = ''' + query { + hello + ... @defer { + post { + id + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [hello: "world"], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: [], + data: [post: [id: "1001"]] + ] + ] + ] + ] + } + + def "nested defers"() { + def query = ''' + query { + ... @defer { + post { + id + ... @defer { + summary + latestComment { + title + ... 
@defer { + content + } + } + } + } + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [:], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path: [], + data: [post: [id: "1001"]] + ] + ] + ], + [ + hasNext : true, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary", latestComment: [title: "Comment title"]] + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path: ["post", "latestComment"], + data: [content: "Full content"] + ] + ] + ] + ] + } + + def "multiple defers on same field"() { + + def query = ''' + query { + post { + ... @defer { + summary + } + ... @defer(label: "defer-outer") { + summary + ... @defer(label: "defer-inner") { + summary + } + } + } + } + ''' + + when: + + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [:]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + // Ordering is non-deterministic, so we assert on the things we know are going to be true. 
+ + incrementalResults.size() == 3 + // only the last payload has "hasNext=true" + incrementalResults[0].hasNext == true + incrementalResults[1].hasNext == true + incrementalResults[2].hasNext == false + + // every payload has only 1 incremental item, and the data is the same for all of them + incrementalResults.every { it.incremental.size() == 1 } + incrementalResults.every { it.incremental[0].data == [summary: "A summary"] } + + // "label" is different for every payload + incrementalResults.any { it.incremental[0].label == null } + incrementalResults.any { it.incremental[0].label == "defer-inner" } + incrementalResults.any { it.incremental[0].label == "defer-outer" } + } + + def "mutations can have defers"() { + def query = ''' + mutation { + addPost { + firstId: id + ... @defer { + text + } + secondId: id + ... @defer { + summary + } + thirdId: id + } + } + ''' + + when: + IncrementalExecutionResult initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [addPost: [firstId: "1001", secondId: "1001", thirdId: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path: ["addPost"], + data: [summary: "A summary"] + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path: ["addPost"], + data: [text: "The full text"] + ] + ] + ] + ] + } + + def "can handle error raised by data fetcher"() { + def query = ''' + query { + post { + id + ... @defer { + dataFetcherError + } + ... 
@defer { + text + } + } + } + ''' + + when: + def initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path : ["post"], + data : [dataFetcherError: null], + errors: [[ + message : "Exception while fetching data (/post/dataFetcherError) : Bang!!!", + locations : [[line: 6, column: 25]], + path : ["post", "dataFetcherError"], + extensions: [classification: "DataFetchingException"] + ]], + ], + ] + ], + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [text: "The full text"], + ] + ] + ] + ] + } + + def "can handle UnresolvedTypeException"() { + def query = """ + query { + post { + id + ... @defer { + text + } + } + ... @defer { + item(type: "NonExistingType") { + id + summary + } + } + } + """ + + when: + def initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path : [], + data : [item: null], + errors: [ + [ + message : "Can't resolve '/item'. Abstract type 'Item' must resolve to an Object type at runtime for field 'Query.item'. Could not determine the exact type of 'Item'", + path : ["item"], + extensions: [ + classification: "DataFetchingException" + ] + ] + ], + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + data : [text: "The full text"], + ] + ] + ] + ] + } + + def "can handle coercion problem"() { + def query = """ + query { + post { + id + ... @defer { + text + } + ... 
@defer { + coercionError + } + } + } + """ + + when: + def initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path : ["post"], + data : [coercionError: null], + errors: [ + [ + message : "Can't serialize value (/post/coercionError) : Expected a value that can be converted to type 'Int' but it was a 'String'", + path : ["post", "coercionError"], + extensions: [ + classification: "DataFetchingException" + ] + ] + ], + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + data : [text: "The full text"], + ] + ] + ] + ] + } + + def "can handle type mismatch problem"() { + def query = """ + query { + post { + id + ... @defer { + text + } + ... @defer { + typeMismatchError + } + } + } + """ + + when: + def initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path : ["post"], + data : [typeMismatchError: null], + errors: [ + [ + message : "Can't resolve value (/post/typeMismatchError) : type mismatch error, expected type LIST", + path : ["post", "typeMismatchError"], + extensions: [ + classification: "DataFetchingException" + ] + ] + ], + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + data : [text: "The full text"], + ] + ] + ] + ] + } + + def "can handle non nullable error in one of the defer calls"() { + def query = """ + query { + post { + id + ... @defer { + text + } + ... 
@defer { + summary + nonNullableError + } + } + } + """ + + when: + def initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: [id: "1001"]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : true, + incremental: [ + [ + path : ["post"], + errors: [ + [ + message : "The field at path '/post/nonNullableError' was declared as a non null type, but the code involved in retrieving data has wrongly returned a null value. The graphql specification requires that the parent field be set to null, or if that is non nullable that it bubble up null to its parent and so on. The non-nullable type is 'String' within parent type 'Post'", + path : ["post", "nonNullableError"], + extensions: [ + classification: "NullValueInNonNullableField" + ] + ] + ], + ] + ] + ], + [ + hasNext : false, + incremental: [ + [ + path : ["post"], + data : [text: "The full text"], + ] + ] + ] + ] + } + + def "can handle non nullable error in the initial result"() { + def query = """ + query { + post { + id + nonNullableError + ... @defer { + summary + } + } + } + """ + + when: + def initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [post: null], + errors : [ + [message : "The field at path '/post/nonNullableError' was declared as a non null type, but the code involved in retrieving data has wrongly returned a null value. The graphql specification requires that the parent field be set to null, or if that is non nullable that it bubble up null to its parent and so on. 
The non-nullable type is 'String' within parent type 'Post'", + path : ["post", "nonNullableError"], + extensions: [classification: "NullValueInNonNullableField"] + ] + ], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + incrementalResults == [ + [ + hasNext : false, + incremental: [ + [ + path: ["post"], + data: [summary: "A summary"], + ] + ] + ] + ] + } + + def "defer on list items"() { + def query = ''' + query { + posts { + id + ... @defer { + wordCount + } + } + } + ''' + + when: + def initialResult = executeQuery(query) + + then: + initialResult.toSpecification() == [ + data : [posts: [[id: "1001"], [id: "1002"], [id: "1003"]]], + hasNext: true + ] + + when: + def incrementalResults = getIncrementalResults(initialResult) + + then: + // Ordering is non-deterministic, so we assert on the things we know are going to be true. + + incrementalResults.size() == 3 + // only the last payload has "hasNext=true" + incrementalResults[0].hasNext == true + incrementalResults[1].hasNext == true + incrementalResults[2].hasNext == false + + // every payload has only 1 incremental item, and the data is the same for all of them + incrementalResults.every { it.incremental.size() == 1 } + incrementalResults.every { it.incremental[0].data == [wordCount: 45999] } + + // path is different for every payload + incrementalResults.any { it.incremental[0].path == ["posts", 0] } + incrementalResults.any { it.incremental[0].path == ["posts", 1] } + incrementalResults.any { it.incremental[0].path == ["posts", 2] } + } + + private ExecutionResult executeQuery(String query) { + return this.executeQuery(query, true, [:]) + } + + private ExecutionResult executeQuery(String query, Map variables) { + return this.executeQuery(query, true, variables) + } + + private ExecutionResult executeQuery(String query, boolean incrementalSupport, Map variables) { + return graphQL.execute( + ExecutionInput.newExecutionInput() + 
.graphQLContext([(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .query(query) + .variables(variables) + .build() + ) + } + + private static List> getIncrementalResults(IncrementalExecutionResult initialResult) { + Publisher deferredResultStream = initialResult.incrementalItemPublisher + + def subscriber = new CapturingSubscriber() + + deferredResultStream.subscribe(subscriber) + + Awaitility.await().untilTrue(subscriber.isDone()) + + return subscriber.getEvents() + .collect { it.toSpecification() } + } +} diff --git a/src/test/groovy/graphql/execution/incremental/DeferredCallTest.groovy b/src/test/groovy/graphql/execution/incremental/DeferredCallTest.groovy new file mode 100644 index 0000000000..252d6ca449 --- /dev/null +++ b/src/test/groovy/graphql/execution/incremental/DeferredCallTest.groovy @@ -0,0 +1,118 @@ +package graphql.execution.incremental + +import graphql.ExecutionResultImpl +import graphql.GraphQLError +import graphql.execution.NonNullableFieldWasNullException +import graphql.execution.ResultPath +import spock.lang.Specification + +import java.util.concurrent.CompletableFuture +import java.util.function.Supplier + +import static graphql.execution.ResultPath.parse +import static java.util.concurrent.CompletableFuture.completedFuture + +class DeferredCallTest extends Specification { + + def "test call capture gives a CF"() { + given: + DeferredFragmentCall call = new DeferredFragmentCall("my-label", parse("/path"), + [createResolvedFieldCall("field", "some data")], new DeferredCallContext()) + + when: + def future = call.invoke() + then: + future.join().toSpecification() == [ + label: "my-label", + path : ["path"], + data : [field: "some data"] + ] + } + + def "multiple field calls are resolved together"() { + given: + DeferredFragmentCall call = new DeferredFragmentCall("my-label", parse("/path"), + [ + createResolvedFieldCall("field1", "some data 1"), + createResolvedFieldCall("field2", "some data 2"), + 
createResolvedFieldCall("field3", "some data 3") + ], + new DeferredCallContext() + ) + + when: + def future = call.invoke() + then: + future.join().toSpecification() == [ + label: "my-label", + path : ["path"], + data : [field1: "some data 1", field2: "some data 2", field3: "some data 3"] + ] + } + + def "can handle non-nullable field error"() { + given: + def deferredCallContext = new DeferredCallContext() + def mockedException = Mock(NonNullableFieldWasNullException) { + getMessage() >> "Field value can't be null" + getPath() >> ResultPath.parse("/path") + } + + DeferredFragmentCall call = new DeferredFragmentCall("my-label", parse("/path"), [ + createFieldCallThatThrowsException(mockedException), + createResolvedFieldCall("field1", "some data") + ], deferredCallContext) + + when: + def future = call.invoke() + def deferPayload = future.join() + + then: + deferPayload.toSpecification() == [ + path : ["path"], + label : "my-label", + errors: [ + [ + message : "Field value can't be null", + path : ["path"], + extensions: [classification: "NullValueInNonNullableField"] + ] + ], + ] + } + + private static Supplier> createResolvedFieldCall( + String fieldName, + Object data + ) { + return createResolvedFieldCall(fieldName, data, Collections.emptyList()) + } + + private static Supplier> createResolvedFieldCall( + String fieldName, + Object data, + List errors + ) { + return new Supplier>() { + @Override + CompletableFuture get() { + return completedFuture( + new DeferredFragmentCall.FieldWithExecutionResult(fieldName, + new ExecutionResultImpl(data, errors) + ) + ) + } + } + } + + private static Supplier> createFieldCallThatThrowsException( + Throwable exception + ) { + return new Supplier>() { + @Override + CompletableFuture get() { + return CompletableFuture.failedFuture(exception) + } + } + } +} diff --git a/src/test/groovy/graphql/execution/incremental/IncrementalCallStateDeferTest.groovy 
b/src/test/groovy/graphql/execution/incremental/IncrementalCallStateDeferTest.groovy new file mode 100644 index 0000000000..8634458122 --- /dev/null +++ b/src/test/groovy/graphql/execution/incremental/IncrementalCallStateDeferTest.groovy @@ -0,0 +1,249 @@ +package graphql.execution.incremental + + +import graphql.ExecutionResultImpl +import graphql.execution.ResultPath +import graphql.incremental.DelayedIncrementalPartialResult +import org.awaitility.Awaitility +import spock.lang.Specification + +import java.util.concurrent.CompletableFuture +import java.util.function.Supplier + +class IncrementalCallStateDeferTest extends Specification { + + def "emits N deferred calls - ordering depends on call latency"() { + given: + def incrementalCallState = new IncrementalCallState() + incrementalCallState.enqueue(offThread("A", 100, "/field/path")) // <-- will finish last + incrementalCallState.enqueue(offThread("B", 50, "/field/path")) // <-- will finish second + incrementalCallState.enqueue(offThread("C", 10, "/field/path")) // <-- will finish first + + when: + List results = startAndWaitCalls(incrementalCallState) + + then: + assertResultsSizeAndHasNextRule(3, results) + results[0].incremental[0].data["c"] == "C" + results[1].incremental[0].data["b"] == "B" + results[2].incremental[0].data["a"] == "A" + } + + def "calls within calls are enqueued correctly"() { + given: + def incrementalCallState = new IncrementalCallState() + incrementalCallState.enqueue(offThreadCallWithinCall(incrementalCallState, "A", "A_Child", 500, "/a")) + incrementalCallState.enqueue(offThreadCallWithinCall(incrementalCallState, "B", "B_Child", 300, "/b")) + incrementalCallState.enqueue(offThreadCallWithinCall(incrementalCallState, "C", "C_Child", 100, "/c")) + + when: + List results = startAndWaitCalls(incrementalCallState) + + then: + assertResultsSizeAndHasNextRule(6, results) + results[0].incremental[0].data["c"] == "C" + results[1].incremental[0].data["c_child"] == "C_Child" + 
results[2].incremental[0].data["b"] == "B" + results[3].incremental[0].data["a"] == "A" + results[4].incremental[0].data["b_child"] == "B_Child" + results[5].incremental[0].data["a_child"] == "A_Child" + } + + def "stops at first exception encountered"() { + given: + def incrementalCallState = new IncrementalCallState() + incrementalCallState.enqueue(offThread("A", 100, "/field/path")) + incrementalCallState.enqueue(offThread("Bang", 50, "/field/path")) // <-- will throw exception + incrementalCallState.enqueue(offThread("C", 10, "/field/path")) + + when: + def subscriber = new graphql.execution.pubsub.CapturingSubscriber() { + @Override + void onComplete() { + assert false, "This should not be called!" + } + } + incrementalCallState.startDeferredCalls().subscribe(subscriber) + + Awaitility.await().untilTrue(subscriber.isDone()) + + def results = subscriber.getEvents() + def thrown = subscriber.getThrowable() + + then: + thrown.message == "java.lang.RuntimeException: Bang" + results[0].incremental[0].data["c"] == "C" + } + + def "you can cancel the subscription"() { + given: + def incrementalCallState = new IncrementalCallState() + incrementalCallState.enqueue(offThread("A", 100, "/field/path")) // <-- will finish last + incrementalCallState.enqueue(offThread("B", 50, "/field/path")) // <-- will finish second + incrementalCallState.enqueue(offThread("C", 10, "/field/path")) // <-- will finish first + + when: + def subscriber = new graphql.execution.pubsub.CapturingSubscriber() { + @Override + void onNext(DelayedIncrementalPartialResult executionResult) { + this.getEvents().add(executionResult) + subscription.cancel() + this.isDone().set(true) + } + } + incrementalCallState.startDeferredCalls().subscribe(subscriber) + + Awaitility.await().untilTrue(subscriber.isDone()) + def results = subscriber.getEvents() + + then: + results.size() == 1 + results[0].incremental[0].data["c"] == "C" + // Cancelling the subscription will result in an invalid state. 
+ // The last result item will have "hasNext=true" (but there will be no next) + results[0].hasNext + } + + def "you can't subscribe twice"() { + given: + def incrementalCallState = new IncrementalCallState() + incrementalCallState.enqueue(offThread("A", 100, "/field/path")) + incrementalCallState.enqueue(offThread("Bang", 50, "/field/path")) // <-- will finish second + incrementalCallState.enqueue(offThread("C", 10, "/field/path")) // <-- will finish first + + when: + def subscriber1 = new graphql.execution.pubsub.CapturingSubscriber() + def subscriber2 = new graphql.execution.pubsub.CapturingSubscriber() + incrementalCallState.startDeferredCalls().subscribe(subscriber1) + incrementalCallState.startDeferredCalls().subscribe(subscriber2) + + then: + subscriber2.throwable != null + subscriber2.throwable.message == "This publisher only supports one subscriber" + } + + def "indicates if there are any defers present"() { + given: + def incrementalCallState = new IncrementalCallState() + + when: + def deferPresent1 = incrementalCallState.getIncrementalCallsDetected() + + then: + !deferPresent1 + + when: + incrementalCallState.enqueue(offThread("A", 100, "/field/path")) + def deferPresent2 = incrementalCallState.getIncrementalCallsDetected() + + then: + deferPresent2 + } + + def "multiple fields are part of the same call"() { + given: "a DeferredCall that contains resolution of multiple fields" + def call1 = new Supplier>() { + @Override + CompletableFuture get() { + return CompletableFuture.supplyAsync({ + Thread.sleep(10) + new DeferredFragmentCall.FieldWithExecutionResult("call1", new ExecutionResultImpl("Call 1", [])) + }) + } + } + + def call2 = new Supplier>() { + @Override + CompletableFuture get() { + return CompletableFuture.supplyAsync({ + Thread.sleep(100) + new DeferredFragmentCall.FieldWithExecutionResult("call2", new ExecutionResultImpl("Call 2", [])) + }) + } + } + + def deferredCall = new DeferredFragmentCall(null, ResultPath.parse("/field/path"), [call1, 
call2], new DeferredCallContext()) + + when: + def incrementalCallState = new IncrementalCallState() + incrementalCallState.enqueue(deferredCall) + + def results = startAndWaitCalls(incrementalCallState) + + then: + assertResultsSizeAndHasNextRule(1, results) + results[0].incremental[0].data["call1"] == "Call 1" + results[0].incremental[0].data["call2"] == "Call 2" + } + + def "race conditions should not impact the calculation of the hasNext value"() { + given: "calls that have the same sleepTime" + def incrementalCallState = new IncrementalCallState() + incrementalCallState.enqueue(offThread("A", 10, "/field/path")) // <-- will finish last + incrementalCallState.enqueue(offThread("B", 10, "/field/path")) // <-- will finish second + incrementalCallState.enqueue(offThread("C", 10, "/field/path")) // <-- will finish first + + when: + List results = startAndWaitCalls(incrementalCallState) + + then: "hasNext placement should be deterministic - only the last event published should have 'hasNext=true'" + assertResultsSizeAndHasNextRule(3, results) + + then: "but the actual order or publish events is non-deterministic - they all have the same latency (sleepTime)." 
+ results.any { it.incremental[0].data["a"] == "A" } + results.any { it.incremental[0].data["b"] == "B" } + results.any { it.incremental[0].data["c"] == "C" } + } + + private static DeferredFragmentCall offThread(String data, int sleepTime, String path) { + def callSupplier = new Supplier>() { + @Override + CompletableFuture get() { + return CompletableFuture.supplyAsync({ + Thread.sleep(sleepTime) + if (data == "Bang") { + throw new RuntimeException(data) + } + new DeferredFragmentCall.FieldWithExecutionResult(data.toLowerCase(), new ExecutionResultImpl(data, [])) + }) + } + } + + return new DeferredFragmentCall(null, ResultPath.parse(path), [callSupplier], new DeferredCallContext()) + } + + private static DeferredFragmentCall offThreadCallWithinCall(IncrementalCallState incrementalCallState, String dataParent, String dataChild, int sleepTime, String path) { + def callSupplier = new Supplier>() { + @Override + CompletableFuture get() { + CompletableFuture.supplyAsync({ + Thread.sleep(sleepTime) + incrementalCallState.enqueue(offThread(dataChild, sleepTime, path)) + new DeferredFragmentCall.FieldWithExecutionResult(dataParent.toLowerCase(), new ExecutionResultImpl(dataParent, [])) + }) + } + } + return new DeferredFragmentCall(null, ResultPath.parse("/field/path"), [callSupplier], new DeferredCallContext()) + } + + private static void assertResultsSizeAndHasNextRule(int expectedSize, List results) { + assert results.size() == expectedSize + + for (def i = 0; i < results.size(); i++) { + def isLastResult = i == results.size() - 1 + def hasNext = results[i].hasNext() + + assert (hasNext && !isLastResult) + || (!hasNext && isLastResult) + } + } + + private static List startAndWaitCalls(IncrementalCallState incrementalCallState) { + def subscriber = new graphql.execution.pubsub.CapturingSubscriber() + + incrementalCallState.startDeferredCalls().subscribe(subscriber) + + Awaitility.await().untilTrue(subscriber.isDone()) + return subscriber.getEvents() + } +} diff --git 
a/src/test/groovy/graphql/execution/instrumentation/AllNullTestingInstrumentation.groovy b/src/test/groovy/graphql/execution/instrumentation/AllNullTestingInstrumentation.groovy index 87ef7800f0..d702335c4d 100644 --- a/src/test/groovy/graphql/execution/instrumentation/AllNullTestingInstrumentation.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/AllNullTestingInstrumentation.groovy @@ -25,11 +25,11 @@ class AllNullTestingInstrumentation implements Instrumentation { List executionList = [] List dfInvocations = [] List dfClasses = [] - Map capturedData = [:] + Map capturedData = [:] @Override - InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return instrumentationState + CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { + return CompletableFuture.completedFuture(instrumentationState) } @Override @@ -60,6 +60,13 @@ class AllNullTestingInstrumentation implements Instrumentation { return null } + @Override + ExecuteObjectInstrumentationContext beginExecuteObject(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + assert state == instrumentationState + executionList << "start:execute-object" + return null + } + @Override InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState state) { assert state == instrumentationState @@ -75,7 +82,7 @@ class AllNullTestingInstrumentation implements Instrumentation { } @Override - InstrumentationContext beginField(InstrumentationFieldParameters parameters, InstrumentationState state) { + InstrumentationContext beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { assert state == instrumentationState executionList << "start:field-$parameters.field.name" return null @@ -89,14 +96,14 @@ class AllNullTestingInstrumentation implements Instrumentation { } @Override - InstrumentationContext 
beginFieldComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { assert state == instrumentationState executionList << "start:complete-$parameters.field.name" return null } @Override - InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { assert state == instrumentationState executionList << "start:complete-list-$parameters.field.name" return null diff --git a/src/test/groovy/graphql/execution/instrumentation/ChainedInstrumentationStateTest.groovy b/src/test/groovy/graphql/execution/instrumentation/ChainedInstrumentationStateTest.groovy index f1812e3955..88d7c86538 100644 --- a/src/test/groovy/graphql/execution/instrumentation/ChainedInstrumentationStateTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/ChainedInstrumentationStateTest.groovy @@ -21,9 +21,12 @@ class ChainedInstrumentationStateTest extends Specification { def a = new NamedInstrumentation("A") def b = new NamedInstrumentation("B") def c = new NamedInstrumentation("C") + def nullState = new SimplePerformantInstrumentation() + def chainedInstrumentation = new ChainedInstrumentation([ a, b, + nullState, c, ]) @@ -53,7 +56,7 @@ class ChainedInstrumentationStateTest extends Specification { "end:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", @@ -62,7 +65,7 @@ class ChainedInstrumentationStateTest extends Specification { "end:complete-id", "end:field-id", - "end:execution-strategy", + "end:execute-object", "end:complete-hero", "end:field-hero", @@ -87,6 +90,8 @@ class ChainedInstrumentationStateTest extends Specification { then: + 
chainedInstrumentation.getInstrumentations().size() == 4 + a.executionList == expected b.executionList == expected c.executionList == expected @@ -139,7 +144,7 @@ class ChainedInstrumentationStateTest extends Specification { "end:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", @@ -148,7 +153,7 @@ class ChainedInstrumentationStateTest extends Specification { "end:complete-id", "end:field-id", - "end:execution-strategy", + "end:execute-object", "end:complete-hero", "end:field-hero", @@ -179,7 +184,7 @@ class ChainedInstrumentationStateTest extends Specification { "end:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", @@ -188,7 +193,7 @@ class ChainedInstrumentationStateTest extends Specification { "end:complete-id", "end:field-id", - "end:execution-strategy", + "end:execute-object", "end:complete-hero", "end:field-hero", @@ -333,7 +338,6 @@ class ChainedInstrumentationStateTest extends Specification { def graphQL = GraphQL .newGraphQL(StarWarsSchema.starWarsSchema) .instrumentation(new ChainedInstrumentation([instrumentation1, instrumentation2])) - .doNotAddDefaultInstrumentations() // important, otherwise a chained one wil be used .build() when: diff --git a/src/test/groovy/graphql/execution/instrumentation/InstrumentationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/InstrumentationTest.groovy index 85b274089a..a92cf94518 100644 --- a/src/test/groovy/graphql/execution/instrumentation/InstrumentationTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/InstrumentationTest.groovy @@ -58,7 +58,7 @@ class InstrumentationTest extends Specification { "onDispatched:fetch-hero", "end:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", "onDispatched:fetch-id", @@ -68,8 +68,8 @@ class InstrumentationTest extends 
Specification { "end:complete-id", "onDispatched:field-id", "end:field-id", - "onDispatched:execution-strategy", - "end:execution-strategy", + "onDispatched:execute-object", + "end:execute-object", "onDispatched:complete-hero", "end:complete-hero", "onDispatched:field-hero", @@ -127,8 +127,9 @@ class InstrumentationTest extends Specification { """ def instrumentation = new LegacyTestingInstrumentation() { + @Override - DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters) { + DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) { return new DataFetcher() { @Override Object get(DataFetchingEnvironment environment) { @@ -171,7 +172,7 @@ class InstrumentationTest extends Specification { return new ExecutionStrategyInstrumentationContext() { @Override - void onDispatched(CompletableFuture result) { + void onDispatched() { System.out.println(String.format("t%s setting go signal on", Thread.currentThread().getId())) goSignal.set(true) } @@ -313,7 +314,7 @@ class InstrumentationTest extends Specification { "onDispatched:fetch-hero", "end:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", "onDispatched:fetch-id", @@ -323,8 +324,8 @@ class InstrumentationTest extends Specification { "end:complete-id", "onDispatched:field-id", "end:field-id", - "onDispatched:execution-strategy", - "end:execution-strategy", + "onDispatched:execute-object", + "end:execute-object", "onDispatched:complete-hero", "end:complete-hero", "onDispatched:field-hero", @@ -375,7 +376,6 @@ class InstrumentationTest extends Specification { def graphQL = GraphQL .newGraphQL(StarWarsSchema.starWarsSchema) .instrumentation(instrumentation) - .doNotAddDefaultInstrumentations() // important, otherwise a chained one wil be used .build() when: @@ -394,7 +394,7 @@ class InstrumentationTest extends 
Specification { "start:field-human", "start:fetch-human", "start:complete-human", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", "start:complete-id", @@ -447,7 +447,48 @@ class InstrumentationTest extends Specification { def graphQL = GraphQL .newGraphQL(StarWarsSchema.starWarsSchema) .instrumentation(instrumentation1) - .doNotAddDefaultInstrumentations() // important, otherwise a chained one wil be used + .build() + + when: + def variables = [var: "1001"] + def er = graphQL.execute(ExecutionInput.newExecutionInput().query(query).variables(variables)) // Luke + + then: + er.extensions == [i1: "I1"] + } + + def "can have an backwards compatibility createState() in play"() { + + + given: + + def query = '''query Q($var: String!) { + human(id: $var) { + id + name + } + } + ''' + + + def instrumentation1 = new SimplePerformantInstrumentation() { + + @Override + InstrumentationState createState(InstrumentationCreateStateParameters parameters) { + return new StringInstrumentationState("I1") + } + + @Override + CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { + return CompletableFuture.completedFuture( + executionResult.transform { it.addExtension("i1", ((StringInstrumentationState) state).value) } + ) + } + } + + def graphQL = GraphQL + .newGraphQL(StarWarsSchema.starWarsSchema) + .instrumentation(instrumentation1) .build() when: diff --git a/src/test/groovy/graphql/execution/instrumentation/LegacyTestingInstrumentation.groovy b/src/test/groovy/graphql/execution/instrumentation/LegacyTestingInstrumentation.groovy index e8a9478cb6..eb43263e72 100644 --- a/src/test/groovy/graphql/execution/instrumentation/LegacyTestingInstrumentation.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/LegacyTestingInstrumentation.groovy @@ -3,6 +3,7 @@ package graphql.execution.instrumentation import graphql.ExecutionInput import 
graphql.ExecutionResult import graphql.execution.ExecutionContext +import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters import graphql.execution.instrumentation.parameters.InstrumentationExecuteOperationParameters import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters import graphql.execution.instrumentation.parameters.InstrumentationExecutionStrategyParameters @@ -33,91 +34,96 @@ class LegacyTestingInstrumentation implements Instrumentation { def useOnDispatch = false @Override - InstrumentationState createState() { - return instrumentationState + CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { + return CompletableFuture.completedFuture(instrumentationState) } @Override - InstrumentationContext beginExecution(InstrumentationExecutionParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext beginExecution(InstrumentationExecutionParameters parameters, InstrumentationState state) { + assert state == instrumentationState new TestingInstrumentContext("execution", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext beginParse(InstrumentationExecutionParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext beginParse(InstrumentationExecutionParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingInstrumentContext("parse", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters, InstrumentationState state) { + assert state == 
instrumentationState return new TestingInstrumentContext("validation", executionList, throwableList, useOnDispatch) } @Override - ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + ExecutionStrategyInstrumentationContext beginExecutionStrategy(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingExecutionStrategyInstrumentationContext("execution-strategy", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + ExecuteObjectInstrumentationContext beginExecuteObject(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + return new TestingExecuteObjectInstrumentationContext("execute-object", executionList, throwableList, useOnDispatch) + } + + @Override + InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingInstrumentContext("execute-operation", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext beginSubscribedFieldEvent(InstrumentationFieldParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingInstrumentContext("subscribed-field-event-$parameters.field.name", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext 
beginField(InstrumentationFieldParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingInstrumentContext("field-$parameters.field.name", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext beginFieldFetch(InstrumentationFieldFetchParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingInstrumentContext("fetch-$parameters.field.name", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext> beginFieldComplete(InstrumentationFieldCompleteParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingInstrumentContext("complete-$parameters.field.name", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext> beginFieldListComplete(InstrumentationFieldCompleteParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + assert state == instrumentationState return new TestingInstrumentContext("complete-list-$parameters.field.name", executionList, throwableList, useOnDispatch) } @Override - GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters 
parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + GraphQLSchema instrumentSchema(GraphQLSchema schema, InstrumentationExecutionParameters parameters, InstrumentationState state) { + assert state == instrumentationState return schema } @Override - ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + ExecutionInput instrumentExecutionInput(ExecutionInput executionInput, InstrumentationExecutionParameters parameters, InstrumentationState state) { + assert state == instrumentationState return executionInput } @Override - ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + ExecutionContext instrumentExecutionContext(ExecutionContext executionContext, InstrumentationExecutionParameters parameters, InstrumentationState state) { + assert state == instrumentationState return executionContext } @Override - DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) { + assert state == instrumentationState dfClasses.add(dataFetcher.getClass()) return new DataFetcher() { @Override @@ -129,8 +135,8 @@ class LegacyTestingInstrumentation implements Instrumentation { } @Override - CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) { - assert parameters.getInstrumentationState() == instrumentationState // Retain for test coverage + 
CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { + assert state == instrumentationState return CompletableFuture.completedFuture(executionResult) } } diff --git a/src/test/groovy/graphql/execution/instrumentation/ModernTestingInstrumentation.groovy b/src/test/groovy/graphql/execution/instrumentation/ModernTestingInstrumentation.groovy index 81648969f7..5d6fbb1d8e 100644 --- a/src/test/groovy/graphql/execution/instrumentation/ModernTestingInstrumentation.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/ModernTestingInstrumentation.groovy @@ -33,8 +33,8 @@ class ModernTestingInstrumentation implements Instrumentation { boolean useOnDispatch = false @Override - InstrumentationState createState(InstrumentationCreateStateParameters parameters) { - return instrumentationState + CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { + return CompletableFuture.completedFuture(instrumentationState) } @Override @@ -61,6 +61,12 @@ class ModernTestingInstrumentation implements Instrumentation { return new TestingExecutionStrategyInstrumentationContext("execution-strategy", executionList, throwableList, useOnDispatch) } + @Override + ExecuteObjectInstrumentationContext beginExecuteObject(InstrumentationExecutionStrategyParameters parameters, InstrumentationState state) { + assert state == instrumentationState + return new TestingExecuteObjectInstrumentationContext("execute-object", executionList, throwableList, useOnDispatch) + } + @Override InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState state) { assert state == instrumentationState @@ -74,7 +80,7 @@ class ModernTestingInstrumentation implements Instrumentation { } @Override - InstrumentationContext beginField(InstrumentationFieldParameters parameters, InstrumentationState state) { + InstrumentationContext 
beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { assert state == instrumentationState return new TestingInstrumentContext("field-$parameters.field.name", executionList, throwableList, useOnDispatch) } @@ -86,13 +92,13 @@ class ModernTestingInstrumentation implements Instrumentation { } @Override - InstrumentationContext beginFieldComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + InstrumentationContext beginFieldCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { assert state == instrumentationState return new TestingInstrumentContext("complete-$parameters.field.name", executionList, throwableList, useOnDispatch) } @Override - InstrumentationContext beginFieldListComplete(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { + InstrumentationContext beginFieldListCompletion(InstrumentationFieldCompleteParameters parameters, InstrumentationState state) { assert state == instrumentationState return new TestingInstrumentContext("complete-list-$parameters.field.name", executionList, throwableList, useOnDispatch) } diff --git a/src/test/groovy/graphql/execution/instrumentation/NamedInstrumentation.groovy b/src/test/groovy/graphql/execution/instrumentation/NamedInstrumentation.groovy index 4ed707cfba..5e17e040ad 100644 --- a/src/test/groovy/graphql/execution/instrumentation/NamedInstrumentation.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/NamedInstrumentation.groovy @@ -1,6 +1,7 @@ package graphql.execution.instrumentation import graphql.ExecutionResult +import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters import graphql.execution.instrumentation.parameters.InstrumentationExecuteOperationParameters import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters import 
graphql.execution.instrumentation.parameters.InstrumentationExecutionStrategyParameters @@ -26,8 +27,8 @@ class NamedInstrumentation extends ModernTestingInstrumentation { } @Override - InstrumentationState createState() { - return instrumentationState + CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { + return CompletableFuture.completedFuture(instrumentationState) } def assertState(InstrumentationState instrumentationState) { @@ -66,9 +67,9 @@ class NamedInstrumentation extends ModernTestingInstrumentation { } @Override - InstrumentationContext beginField(InstrumentationFieldParameters parameters, InstrumentationState state) { + InstrumentationContext beginFieldExecution(InstrumentationFieldParameters parameters, InstrumentationState state) { assertState(state) - return super.beginField(parameters, state) + return super.beginFieldExecution(parameters, state) } @Override diff --git a/src/test/groovy/graphql/execution/instrumentation/NoContextChainedInstrumentationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/NoContextChainedInstrumentationTest.groovy index a76c1e51d6..981f756633 100644 --- a/src/test/groovy/graphql/execution/instrumentation/NoContextChainedInstrumentationTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/NoContextChainedInstrumentationTest.groovy @@ -42,7 +42,7 @@ class NoContextChainedInstrumentationTest extends Specification { "start:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", diff --git a/src/test/groovy/graphql/execution/instrumentation/TestingExecuteObjectInstrumentationContext.groovy b/src/test/groovy/graphql/execution/instrumentation/TestingExecuteObjectInstrumentationContext.groovy new file mode 100644 index 0000000000..a5b6cfd783 --- /dev/null +++ b/src/test/groovy/graphql/execution/instrumentation/TestingExecuteObjectInstrumentationContext.groovy @@ -0,0 +1,9 @@ +package 
graphql.execution.instrumentation + +class TestingExecuteObjectInstrumentationContext extends TestingInstrumentContext> implements ExecuteObjectInstrumentationContext { + + TestingExecuteObjectInstrumentationContext(Object op, Object executionList, Object throwableList, Boolean useOnDispatch) { + super(op, executionList, throwableList, useOnDispatch) + } +} + diff --git a/src/test/groovy/graphql/execution/instrumentation/TestingInstrumentContext.groovy b/src/test/groovy/graphql/execution/instrumentation/TestingInstrumentContext.groovy index cdfaa84513..402fd2aee0 100644 --- a/src/test/groovy/graphql/execution/instrumentation/TestingInstrumentContext.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/TestingInstrumentContext.groovy @@ -25,7 +25,7 @@ class TestingInstrumentContext implements InstrumentationContext { } @Override - void onDispatched(CompletableFuture result) { + void onDispatched() { if (useOnDispatch) { this.executionList << "onDispatched:$op" } diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/BatchCompareDataFetchers.java b/src/test/groovy/graphql/execution/instrumentation/dataloader/BatchCompareDataFetchers.java index 430f79a74c..7f20938b3b 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/BatchCompareDataFetchers.java +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/BatchCompareDataFetchers.java @@ -77,15 +77,12 @@ private static List getDepartmentsForShop(Shop shop) { } private static List> getDepartmentsForShops(List shops) { - System.out.println("getDepartmentsForShops batch: " + shops); List> departmentsResult = shops.stream().map(BatchCompareDataFetchers::getDepartmentsForShop).collect(Collectors.toList()); - System.out.println("result " + departmentsResult); return departmentsResult; } private BatchLoader> departmentsForShopsBatchLoader = ids -> maybeAsyncWithSleep(() -> { - System.out.println("ids" + ids); departmentsForShopsBatchLoaderCounter.incrementAndGet(); 
List shopList = new ArrayList<>(); for (String id : ids) { @@ -127,7 +124,6 @@ private static List getProductsForDepartment(Department department) { } private static List> getProductsForDepartments(List departments) { - System.out.println("getProductsForDepartments batch: " + departments); return departments.stream().map(BatchCompareDataFetchers::getProductsForDepartment).collect(Collectors.toList()); } diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderCompanyProductMutationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderCompanyProductMutationTest.groovy index 77e87642f6..649da5e0d4 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderCompanyProductMutationTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderCompanyProductMutationTest.groovy @@ -66,7 +66,6 @@ class DataLoaderCompanyProductMutationTest extends Specification { def graphQL = TestUtil.graphQL(spec, wiring) .queryExecutionStrategy(queryES) .mutationExecutionStrategy(mutationES) - .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() ExecutionInput executionInput = ExecutionInput.newExecutionInput() diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherTest.groovy similarity index 76% rename from src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationTest.groovy rename to src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherTest.groovy index ca01f1b3d9..7eaa9cec10 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentationTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherTest.groovy @@ -1,15 +1,10 @@ package 
graphql.execution.instrumentation.dataloader import graphql.ExecutionInput -import graphql.ExecutionResult import graphql.GraphQL import graphql.TestUtil -import graphql.execution.AsyncExecutionStrategy import graphql.execution.AsyncSerialExecutionStrategy -import graphql.execution.ExecutionContext -import graphql.execution.ExecutionStrategyParameters import graphql.execution.instrumentation.ChainedInstrumentation -import graphql.execution.instrumentation.Instrumentation import graphql.execution.instrumentation.InstrumentationState import graphql.execution.instrumentation.SimplePerformantInstrumentation import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters @@ -29,17 +24,8 @@ import static graphql.StarWarsSchema.starWarsSchema import static graphql.schema.idl.RuntimeWiring.newRuntimeWiring import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring -class DataLoaderDispatcherInstrumentationTest extends Specification { +class DataLoaderDispatcherTest extends Specification { - class CaptureStrategy extends AsyncExecutionStrategy { - Instrumentation instrumentation = null - - @Override - CompletableFuture execute(ExecutionContext executionContext, ExecutionStrategyParameters parameters) { - instrumentation = executionContext.instrumentation - return super.execute(executionContext, parameters) - } - } def query = """ query { @@ -65,36 +51,7 @@ class DataLoaderDispatcherInstrumentationTest extends Specification { ] - def "dataloader instrumentation is always added and an empty data loader registry is in place"() { - def captureStrategy = new CaptureStrategy() - def graphQL = GraphQL.newGraphQL(starWarsSchema).queryExecutionStrategy(captureStrategy) - .instrumentation(new SimplePerformantInstrumentation()) - .build() - def executionInput = newExecutionInput().query('{ hero { name } }').build() - when: - graphQL.execute(executionInput) - then: - executionInput.getDataLoaderRegistry() != null - def chainedInstrumentation = 
captureStrategy.instrumentation as ChainedInstrumentation - chainedInstrumentation.instrumentations.any { instr -> instr instanceof DataLoaderDispatcherInstrumentation } - } - - def "dispatch is never called if data loader registry is not set"() { - def dataLoaderRegistry = new DataLoaderRegistry() { - @Override - void dispatchAll() { - assert false, "This should not be called when there are no data loaders" - } - } - def graphQL = GraphQL.newGraphQL(starWarsSchema).build() - def executionInput = newExecutionInput().query('{ hero { name } }').build() - - when: - def er = graphQL.execute(executionInput) - then: - er.errors.isEmpty() - } def "dispatch is called if there are data loaders"() { def dispatchedCalled = false @@ -131,7 +88,6 @@ class DataLoaderDispatcherInstrumentationTest extends Specification { DataLoaderRegistry startingDataLoaderRegistry = new DataLoaderRegistry() def enhancedDataLoaderRegistry = starWarsWiring.newDataLoaderRegistry() - def dlInstrumentation = new DataLoaderDispatcherInstrumentation() def enhancingInstrumentation = new SimplePerformantInstrumentation() { @NotNull @@ -142,7 +98,7 @@ class DataLoaderDispatcherInstrumentationTest extends Specification { } } - def chainedInstrumentation = new ChainedInstrumentation([dlInstrumentation, enhancingInstrumentation]) + def chainedInstrumentation = new ChainedInstrumentation([enhancingInstrumentation]) def graphql = GraphQL.newGraphQL(starWarsWiring.schema) .instrumentation(chainedInstrumentation).build() @@ -159,17 +115,16 @@ class DataLoaderDispatcherInstrumentationTest extends Specification { @Unroll - def "ensure DataLoaderDispatcherInstrumentation works for #executionStrategyName"() { + def "ensure DataLoaderDispatcher works for #executionStrategyName"() { given: def starWarsWiring = new StarWarsDataLoaderWiring() def dlRegistry = starWarsWiring.newDataLoaderRegistry() - def batchingInstrumentation = new DataLoaderDispatcherInstrumentation() def graphql = 
GraphQL.newGraphQL(starWarsWiring.schema) .queryExecutionStrategy(executionStrategy) - .instrumentation(batchingInstrumentation).build() + .build() when: @@ -186,14 +141,13 @@ class DataLoaderDispatcherInstrumentationTest extends Specification { "AsyncSerialExecutionStrategy" | new AsyncSerialExecutionStrategy() || _ } - def "basic batch loading is possible via instrumentation interception of Execution Strategies"() { + def "basic batch loading is possible"() { given: def starWarsWiring = new StarWarsDataLoaderWiring() def dlRegistry = starWarsWiring.newDataLoaderRegistry() - def batchingInstrumentation = new DataLoaderDispatcherInstrumentation() - def graphql = GraphQL.newGraphQL(starWarsWiring.schema).instrumentation(batchingInstrumentation).build() + def graphql = GraphQL.newGraphQL(starWarsWiring.schema).build() when: @@ -229,9 +183,9 @@ class DataLoaderDispatcherInstrumentationTest extends Specification { given: def starWarsWiring = new StarWarsDataLoaderWiring() def dlRegistry = starWarsWiring.newDataLoaderRegistry() - def batchingInstrumentation = new DataLoaderDispatcherInstrumentation() - def graphql = GraphQL.newGraphQL(starWarsWiring.schema).instrumentation(batchingInstrumentation).build() + def graphql = GraphQL.newGraphQL(starWarsWiring.schema) + .build() when: def query = """ @@ -302,9 +256,7 @@ class DataLoaderDispatcherInstrumentationTest extends Specification { given: def support = new DeepDataFetchers() def dummyDataloaderRegistry = new DataLoaderRegistry() - def batchingInstrumentation = new DataLoaderDispatcherInstrumentation() def graphql = GraphQL.newGraphQL(support.schema()) - .instrumentation(batchingInstrumentation) .build() // FieldLevelTrackingApproach uses LevelMaps with a default size of 16. 
// Use a value greater than 16 to ensure that the underlying LevelMaps are resized diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy index cda31ba34b..2d98da377f 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy @@ -125,7 +125,6 @@ class DataLoaderHangingTest extends Specification { when: def graphql = GraphQL.newGraphQL(schema) - .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() then: "execution shouldn't hang" @@ -353,7 +352,6 @@ class DataLoaderHangingTest extends Specification { GraphQL graphQL = GraphQL .newGraphQL(graphQLSchema) .queryExecutionStrategy(new AsyncExecutionStrategy(customExceptionHandlerThatThrows)) - .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() when: diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderNodeTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderNodeTest.groovy index 0bfab06b4f..dd4be355f7 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderNodeTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderNodeTest.groovy @@ -135,7 +135,7 @@ class DataLoaderNodeTest extends Specification { DataLoaderRegistry registry = new DataLoaderRegistry().register(childNodesFieldName, loader) ExecutionResult result = GraphQL.newGraphQL(schema) - .instrumentation(new DataLoaderDispatcherInstrumentation()) +// .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() .execute(ExecutionInput.newExecutionInput().dataLoaderRegistry(registry).query( ''' diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceData.groovy 
b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceData.groovy index 7ca398f084..6246f883d8 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceData.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceData.groovy @@ -1,10 +1,15 @@ package graphql.execution.instrumentation.dataloader - +import graphql.Directives import graphql.GraphQL import graphql.execution.instrumentation.Instrumentation +import graphql.execution.pubsub.CapturingSubscriber +import graphql.incremental.DelayedIncrementalPartialResult +import graphql.incremental.IncrementalExecutionResult import graphql.schema.GraphQLSchema +import org.awaitility.Awaitility import org.dataloader.DataLoaderRegistry +import org.reactivestreams.Publisher class DataLoaderPerformanceData { @@ -22,12 +27,21 @@ class DataLoaderPerformanceData { GraphQL setupGraphQL(Instrumentation instrumentation) { GraphQLSchema schema = new BatchCompare().buildDataLoaderSchema(batchCompareDataFetchers) + schema = schema.transform({ bldr -> bldr.additionalDirective(Directives.DeferDirective) }) GraphQL.newGraphQL(schema) .instrumentation(instrumentation) .build() } + GraphQL setupGraphQL() { + GraphQLSchema schema = new BatchCompare().buildDataLoaderSchema(batchCompareDataFetchers) + schema = schema.transform({ bldr -> bldr.additionalDirective(Directives.DeferDirective) }) + + GraphQL.newGraphQL(schema) + .build() + } + static def expectedData = [ shops: [ [id : "shop-1", name: "Shop 1", @@ -107,6 +121,46 @@ class DataLoaderPerformanceData { ] + static void assertIncrementalExpensiveData(List> incrementalResults) { + // Ordering is non-deterministic, so we assert on the things we know are going to be true. 
+ + assert incrementalResults.size() == 25 + // only the last payload has "hasNext=true" + assert incrementalResults.subList(0, 24).every { it.hasNext == true } + assert incrementalResults[24].hasNext == false + + // every payload has only 1 incremental item, and the data is the same for all of them + assert incrementalResults.every { it.incremental.size() == 1 } + + def incrementalResultsItems = incrementalResults.collect { it.incremental[0] } + + // the order of the actual data is non-deterministic. So we assert via "any" that the data is there + assert incrementalResultsItems.any { it == [path: ["shops", 0], data: [departments: [[name: "Department 1"], [name: "Department 2"], [name: "Department 3"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 0], data: [expensiveDepartments: [[name: "Department 1", products: [[name: "Product 1"]], expensiveProducts: [[name: "Product 1"]]], [name: "Department 2", products: [[name: "Product 2"]], expensiveProducts: [[name: "Product 2"]]], [name: "Department 3", products: [[name: "Product 3"]], expensiveProducts: [[name: "Product 3"]]]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1], data: [expensiveDepartments: [[name: "Department 4", products: [[name: "Product 4"]], expensiveProducts: [[name: "Product 4"]]], [name: "Department 5", products: [[name: "Product 5"]], expensiveProducts: [[name: "Product 5"]]], [name: "Department 6", products: [[name: "Product 6"]], expensiveProducts: [[name: "Product 6"]]]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1], data: [departments: [[name: "Department 4"], [name: "Department 5"], [name: "Department 6"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 2], data: [departments: [[name: "Department 7"], [name: "Department 8"], [name: "Department 9"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 2], data: [expensiveDepartments: [[name: "Department 7", products: [[name: "Product 7"]], 
expensiveProducts: [[name: "Product 7"]]], [name: "Department 8", products: [[name: "Product 8"]], expensiveProducts: [[name: "Product 8"]]], [name: "Department 9", products: [[name: "Product 9"]], expensiveProducts: [[name: "Product 9"]]]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 0, "departments", 0], data: [products: [[name: "Product 1"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 0, "departments", 0], data: [expensiveProducts: [[name: "Product 1"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 0, "departments", 1], data: [products: [[name: "Product 2"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 0, "departments", 1], data: [expensiveProducts: [[name: "Product 2"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 0, "departments", 2], data: [products: [[name: "Product 3"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 0, "departments", 2], data: [expensiveProducts: [[name: "Product 3"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1, "departments", 0], data: [expensiveProducts: [[name: "Product 4"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1, "departments", 0], data: [products: [[name: "Product 4"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1, "departments", 1], data: [products: [[name: "Product 5"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1, "departments", 1], data: [expensiveProducts: [[name: "Product 5"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1, "departments", 2], data: [expensiveProducts: [[name: "Product 6"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 1, "departments", 2], data: [products: [[name: "Product 6"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 2, "departments", 0], data: [products: [[name: "Product 7"]]]] } + assert incrementalResultsItems.any { it == 
[path: ["shops", 2, "departments", 0], data: [expensiveProducts: [[name: "Product 7"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 2, "departments", 1], data: [products: [[name: "Product 8"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 2, "departments", 1], data: [expensiveProducts: [[name: "Product 8"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 2, "departments", 2], data: [products: [[name: "Product 9"]]]] } + assert incrementalResultsItems.any { it == [path: ["shops", 2, "departments", 2], data: [expensiveProducts: [[name: "Product 9"]]]] } + assert incrementalResultsItems.any { it == [path: [], data: [expensiveShops: [[id: "exshop-1", name: "ExShop 1"], [id: "exshop-2", name: "ExShop 2"], [id: "exshop-3", name: "ExShop 3"]]]] } + } static def expensiveQuery = """ query { @@ -154,4 +208,152 @@ class DataLoaderPerformanceData { } } """ + + static def expectedInitialDeferredData = [ + data : [ + shops: [ + [id: "shop-1", name: "Shop 1"], + [id: "shop-2", name: "Shop 2"], + [id: "shop-3", name: "Shop 3"], + ] + ], + hasNext: true + ] + + static def expectedListOfDeferredData = [ + [ + hasNext : true, + incremental: [[ + path: ["shops", 0], + data: [ + departments: [ + [id: "department-1", name: "Department 1", products: [[id: "product-1", name: "Product 1"]]], + [id: "department-2", name: "Department 2", products: [[id: "product-2", name: "Product 2"]]], + [id: "department-3", name: "Department 3", products: [[id: "product-3", name: "Product 3"]]] + ] + ] + ]], + ], + [ + hasNext : true, + incremental: [[ + path: ["shops", 1], + data: [ + departments: [ + [id: "department-4", name: "Department 4", products: [[id: "product-4", name: "Product 4"]]], + [id: "department-5", name: "Department 5", products: [[id: "product-5", name: "Product 5"]]], + [id: "department-6", name: "Department 6", products: [[id: "product-6", name: "Product 6"]]] + ] + ], + ]], + ], + [ + hasNext : false, + incremental: [[ + 
path: ["shops", 2], + data: [ + departments: [ + [id: "department-7", name: "Department 7", products: [[id: "product-7", name: "Product 7"]]], + [id: "department-8", name: "Department 8", products: [[id: "product-8", name: "Product 8"]]], + [id: "department-9", name: "Department 9", products: [[id: "product-9", name: "Product 9"]]] + ] + ] + ]], + ] + ] + + + static def deferredQuery = """ + query { + shops { + id name + ... @defer { + departments { + id name + products { + id name + } + } + } + } + } + """ + + static def expensiveDeferredQuery = """ + query { + shops { + id name + ... @defer { + departments { + name + ... @defer { + products { + name + } + } + ... @defer { + expensiveProducts { + name + } + } + } + } + ... @defer { + expensiveDepartments { + name + products { + name + } + expensiveProducts { + name + } + } + } + } + ... @defer { + expensiveShops { + id name + } + } + } + """ + + static def expectedExpensiveDeferredData = [ + [[id: "exshop-1", name: "ExShop 1"], [id: "exshop-2", name: "ExShop 2"], [id: "exshop-3", name: "ExShop 3"]], + [[name: "Department 1",products:null, expensiveProducts:null], [name: "Department 2",products:null, expensiveProducts:null], [name: "Department 3",products:null, expensiveProducts:null]], + [[name: "Department 1", products: [[name: "Product 1"]], expensiveProducts: [[name: "Product 1"]]], [name: "Department 2", products: [[name: "Product 2"]], expensiveProducts: [[name: "Product 2"]]], [name: "Department 3", products: [[name: "Product 3"]], expensiveProducts: [[name: "Product 3"]]]], + [[name: "Department 4",products:null, expensiveProducts:null], [name: "Department 5",products:null, expensiveProducts:null], [name: "Department 6",products:null, expensiveProducts:null]], + [[name: "Department 4", products: [[name: "Product 4"]], expensiveProducts: [[name: "Product 4"]]], [name: "Department 5", products: [[name: "Product 5"]], expensiveProducts: [[name: "Product 5"]]], [name: "Department 6", products: [[name: "Product 
6"]], expensiveProducts: [[name: "Product 6"]]]], + [[name: "Department 7",products:null, expensiveProducts:null], [name: "Department 8",products:null, expensiveProducts:null], [name: "Department 9",products:null, expensiveProducts:null]], + [[name: "Department 7", products: [[name: "Product 7"]], expensiveProducts: [[name: "Product 7"]]], [name: "Department 8", products: [[name: "Product 8"]], expensiveProducts: [[name: "Product 8"]]], [name: "Department 9", products: [[name: "Product 9"]], expensiveProducts: [[name: "Product 9"]]]], + [[name: "Product 1"]], + [[name: "Product 1"]], + [[name: "Product 2"]], + [[name: "Product 2"]], + [[name: "Product 3"]], + [[name: "Product 3"]], + [[name: "Product 4"]], + [[name: "Product 4"]], + [[name: "Product 5"]], + [[name: "Product 5"]], + [[name: "Product 6"]], + [[name: "Product 6"]], + [[name: "Product 7"]], + [[name: "Product 7"]], + [[name: "Product 8"]], + [[name: "Product 8"]], + [[name: "Product 9"]], + [[name: "Product 9"]], + ] + + static List> getIncrementalResults(IncrementalExecutionResult initialResult) { + Publisher deferredResultStream = initialResult.incrementalItemPublisher + + def subscriber = new CapturingSubscriber() + deferredResultStream.subscribe(subscriber) + Awaitility.await().untilTrue(subscriber.isDone()) + + return subscriber.getEvents() + .collect { it.toSpecification() } + } } diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceTest.groovy index 4565db0615..61fba228bf 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceTest.groovy @@ -1,15 +1,22 @@ package graphql.execution.instrumentation.dataloader - import graphql.ExecutionInput import graphql.GraphQL -import graphql.execution.instrumentation.Instrumentation 
+import graphql.incremental.IncrementalExecutionResult import org.dataloader.DataLoaderRegistry +import spock.lang.Ignore import spock.lang.Specification +import static graphql.ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.assertIncrementalExpensiveData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.expectedExpensiveData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.expectedInitialDeferredData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getDeferredQuery import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpectedData -import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpectedExpensiveData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpectedListOfDeferredData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpensiveDeferredQuery import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpensiveQuery +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getIncrementalResults import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getQuery class DataLoaderPerformanceTest extends Specification { @@ -22,13 +29,16 @@ class DataLoaderPerformanceTest extends Specification { batchCompareDataFetchers = new BatchCompareDataFetchers() DataLoaderPerformanceData dataLoaderPerformanceData = new DataLoaderPerformanceData(batchCompareDataFetchers) dataLoaderRegistry = dataLoaderPerformanceData.setupDataLoaderRegistry() - Instrumentation instrumentation = new DataLoaderDispatcherInstrumentation() - graphQL = dataLoaderPerformanceData.setupGraphQL(instrumentation) + graphQL = dataLoaderPerformanceData.setupGraphQL() } def "760 ensure 
data loader is performant for lists"() { when: - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(query).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(query) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() def result = graphQL.execute(executionInput) then: @@ -37,13 +47,20 @@ class DataLoaderPerformanceTest extends Specification { // eg 1 for shops-->departments and one for departments --> products batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() == 1 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() == 1 + + where: + incrementalSupport << [true, false] } def "970 ensure data loader is performant for multiple field with lists"() { when: - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(expensiveQuery).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(expensiveQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() def result = graphQL.execute(executionInput) then: @@ -51,6 +68,9 @@ class DataLoaderPerformanceTest extends Specification { batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() <= 2 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() <= 2 + + where: + incrementalSupport << [true, false] } def "ensure data loader is performant for lists using async batch loading"() { @@ -59,7 +79,12 @@ class DataLoaderPerformanceTest extends Specification { batchCompareDataFetchers.useAsyncBatchLoading(true) - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(query).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(query) + 
.dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() + def result = graphQL.execute(executionInput) then: @@ -69,6 +94,8 @@ class DataLoaderPerformanceTest extends Specification { batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() == 1 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() == 1 + where: + incrementalSupport << [true, false] } def "970 ensure data loader is performant for multiple field with lists using async batch loading"() { @@ -77,7 +104,12 @@ class DataLoaderPerformanceTest extends Specification { batchCompareDataFetchers.useAsyncBatchLoading(true) - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(expensiveQuery).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(expensiveQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() + def result = graphQL.execute(executionInput) then: @@ -85,6 +117,79 @@ class DataLoaderPerformanceTest extends Specification { batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() <= 2 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() <= 2 + + where: + incrementalSupport << [true, false] + } + + def "data loader will not work with deferred queries"() { + when: + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(deferredQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): true]) + .build() + + def result = graphQL.execute(executionInput) + println(result); + + then: + def exception = thrown(UnsupportedOperationException) + exception.message == "Data Loaders cannot be used to resolve deferred fields" + } + + @Ignore("Resolution of deferred fields via Data loaders is not yet supported") + def "data loader will work with deferred queries"() { + + 
when: + + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(deferredQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): true]) + .build() + + IncrementalExecutionResult result = graphQL.execute(executionInput) + + then: + result.toSpecification() == expectedInitialDeferredData + + when: + def incrementalResults = getIncrementalResults(result) + + then: + incrementalResults == expectedListOfDeferredData + + // With deferred results, we don't achieve the same efficiency. + batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() == 3 + batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() == 3 } + @Ignore("Resolution of deferred fields via Data loaders is not yet supported") + def "data loader will work with deferred queries on multiple levels deep"() { + + when: + + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(expensiveDeferredQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): true]) + .build() + + IncrementalExecutionResult result = graphQL.execute(executionInput) + + then: + result.toSpecification() == expectedInitialDeferredData + + when: + def incrementalResults = getIncrementalResults(result) + + then: + assertIncrementalExpensiveData(incrementalResults) + + // With deferred results, we don't achieve the same efficiency. + // The final number of loader calls is non-deterministic, so we can't assert an exact number. 
+ batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() >= 3 + batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() >= 3 + } } diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceWithChainedInstrumentationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceWithChainedInstrumentationTest.groovy index 7b1bd96d54..e5d1089bab 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceWithChainedInstrumentationTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderPerformanceWithChainedInstrumentationTest.groovy @@ -1,17 +1,22 @@ package graphql.execution.instrumentation.dataloader - import graphql.ExecutionInput import graphql.GraphQL -import graphql.execution.instrumentation.ChainedInstrumentation -import graphql.execution.instrumentation.Instrumentation +import graphql.incremental.IncrementalExecutionResult import org.dataloader.DataLoaderRegistry import spock.lang.Ignore import spock.lang.Specification +import static graphql.ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.assertIncrementalExpensiveData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.expectedExpensiveData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.expectedInitialDeferredData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.expectedListOfDeferredData +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getDeferredQuery import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpectedData -import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpectedExpensiveData +import static 
graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpensiveDeferredQuery import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getExpensiveQuery +import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getIncrementalResults import static graphql.execution.instrumentation.dataloader.DataLoaderPerformanceData.getQuery class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification { @@ -26,15 +31,18 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification DataLoaderPerformanceData dataLoaderPerformanceData = new DataLoaderPerformanceData(batchCompareDataFetchers) dataLoaderRegistry = dataLoaderPerformanceData.setupDataLoaderRegistry() - Instrumentation instrumentation = new ChainedInstrumentation( - Collections.singletonList(new DataLoaderDispatcherInstrumentation())) - graphQL = dataLoaderPerformanceData.setupGraphQL(instrumentation) + graphQL = dataLoaderPerformanceData.setupGraphQL() } def "chainedInstrumentation: 760 ensure data loader is performant for lists"() { when: - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(query).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(query) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() + def result = graphQL.execute(executionInput) then: @@ -43,6 +51,9 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification // eg 1 for shops-->departments and one for departments --> products batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() == 1 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() == 1 + + where: + incrementalSupport << [true, false] } @Ignore("This test flakes on Travis for some reason. Clearly this indicates some sort of problem to investigate. 
However it also stop releases.") @@ -50,7 +61,11 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification when: - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(expensiveQuery).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(expensiveQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() def result = graphQL.execute(executionInput) @@ -60,6 +75,8 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() == 1 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() == 1 + where: + incrementalSupport << [true, false] } def "chainedInstrumentation: ensure data loader is performant for lists using async batch loading"() { @@ -68,7 +85,11 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification batchCompareDataFetchers.useAsyncBatchLoading(true) - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(query).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(query) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() def result = graphQL.execute(executionInput) then: @@ -78,6 +99,8 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() == 1 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() == 1 + where: + incrementalSupport << [true, false] } def "chainedInstrumentation: 970 ensure data loader is performant for multiple field with lists using async batch loading"() { @@ -86,7 +109,11 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends 
Specification batchCompareDataFetchers.useAsyncBatchLoading(true) - ExecutionInput executionInput = ExecutionInput.newExecutionInput().query(expensiveQuery).dataLoaderRegistry(dataLoaderRegistry).build() + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(expensiveQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): incrementalSupport]) + .build() def result = graphQL.execute(executionInput) then: @@ -94,7 +121,78 @@ class DataLoaderPerformanceWithChainedInstrumentationTest extends Specification batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() <= 2 batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() <= 2 + + where: + incrementalSupport << [true, false] + } + + def "chainedInstrumentation: data loader will not work with deferred queries"() { + when: + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(deferredQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): true]) + .build() + + graphQL.execute(executionInput) + + then: + def exception = thrown(UnsupportedOperationException) + exception.message == "Data Loaders cannot be used to resolve deferred fields" + } + + @Ignore("Resolution of deferred fields via Data loaders is not yet supported") + def "chainedInstrumentation: data loader will work with deferred queries"() { + + when: + + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .query(deferredQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): true]) + .build() + + IncrementalExecutionResult result = graphQL.execute(executionInput) + + then: + result.toSpecification() == expectedInitialDeferredData + + when: + def incrementalResults = getIncrementalResults(result) + + then: + incrementalResults == expectedListOfDeferredData + + // With deferred results, we don't achieve the same efficiency. 
+ batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() == 3 + batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() == 3 } + @Ignore("Resolution of deferred fields via Data loaders is not yet supported") + def "chainedInstrumentation: data loader will work with deferred queries on multiple levels deep"() { + when: + ExecutionInput executionInput = ExecutionInput.newExecutionInput() + .graphQLContext([(ENABLE_INCREMENTAL_SUPPORT): true]) + .query(expensiveDeferredQuery) + .dataLoaderRegistry(dataLoaderRegistry) + .build() + + IncrementalExecutionResult result = graphQL.execute(executionInput) + + then: + result.toSpecification() == expectedInitialDeferredData + + when: + def incrementalResults = getIncrementalResults(result) + + + then: + assertIncrementalExpensiveData(incrementalResults) + + // With deferred results, we don't achieve the same efficiency. + // The final number of loader calls is non-deterministic, so we can't assert an exact number. + batchCompareDataFetchers.departmentsForShopsBatchLoaderCounter.get() >= 3 + batchCompareDataFetchers.productsForDepartmentsBatchLoaderCounter.get() >= 3 + } } diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderTypeMismatchTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderTypeMismatchTest.groovy index 79ea8e2a49..03b60e4e39 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderTypeMismatchTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderTypeMismatchTest.groovy @@ -62,7 +62,6 @@ class DataLoaderTypeMismatchTest extends Specification { def schema = new SchemaGenerator().makeExecutableSchema(typeDefinitionRegistry, wiring) def graphql = GraphQL.newGraphQL(schema) - .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() when: diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/Issue1178DataLoaderDispatchTest.groovy 
b/src/test/groovy/graphql/execution/instrumentation/dataloader/Issue1178DataLoaderDispatchTest.groovy index 1c206692bc..b816602cde 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/Issue1178DataLoaderDispatchTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/Issue1178DataLoaderDispatchTest.groovy @@ -75,8 +75,6 @@ class Issue1178DataLoaderDispatchTest extends Specification { when: def graphql = TestUtil.graphQL(sdl, wiring) - .instrumentation(new DataLoaderDispatcherInstrumentation()) - .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() then: "execution shouldn't error" diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/PeopleCompaniesAndProductsDataLoaderTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/PeopleCompaniesAndProductsDataLoaderTest.groovy index 1b36ec4ecb..70bad946b0 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/PeopleCompaniesAndProductsDataLoaderTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/PeopleCompaniesAndProductsDataLoaderTest.groovy @@ -18,7 +18,6 @@ import spock.lang.Specification import java.util.concurrent.CompletableFuture import java.util.concurrent.CompletionStage import java.util.stream.Collectors -import java.util.stream.IntStream import static graphql.schema.idl.RuntimeWiring.newRuntimeWiring @@ -184,7 +183,6 @@ class PeopleCompaniesAndProductsDataLoaderTest extends Specification { GraphQL graphQL = GraphQL .newGraphQL(graphQLSchema) - .instrumentation(new DataLoaderDispatcherInstrumentation()) .build() when: diff --git a/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy index 06034eb2c3..376c5168fa 100644 --- a/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy +++ 
b/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy @@ -12,7 +12,6 @@ import graphql.execution.ExecutionId import graphql.execution.ResultPath import graphql.execution.ValueUnboxer import graphql.execution.instrumentation.ChainedInstrumentation -import graphql.execution.instrumentation.Instrumentation import graphql.execution.instrumentation.SimplePerformantInstrumentation import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters import spock.lang.Specification @@ -308,10 +307,10 @@ class FieldValidationTest extends Specification { def document = TestUtil.parseQuery(query) def strategy = new AsyncExecutionStrategy() def instrumentation = new FieldValidationInstrumentation(validation) - def execution = new Execution(strategy, strategy, strategy, instrumentation, ValueUnboxer.DEFAULT) + def execution = new Execution(strategy, strategy, strategy, instrumentation, ValueUnboxer.DEFAULT, false) def executionInput = ExecutionInput.newExecutionInput().query(query).variables(variables).build() - execution.execute(document, schema, ExecutionId.generate(), executionInput, SimplePerformantInstrumentation.INSTANCE.createState(new InstrumentationCreateStateParameters(schema, executionInput))) + execution.execute(document, schema, ExecutionId.generate(), executionInput, null) } def "test graphql from end to end with chained instrumentation"() { @@ -322,7 +321,7 @@ class FieldValidationTest extends Specification { } } def instrumentations = [new FieldValidationInstrumentation - (fieldValidation)] + (fieldValidation)] def chainedInstrumentation = new ChainedInstrumentation(instrumentations); def graphql = GraphQL .newGraphQL(schema) diff --git a/src/test/groovy/graphql/execution/preparsed/NoOpPreparsedDocumentProviderTest.groovy b/src/test/groovy/graphql/execution/preparsed/NoOpPreparsedDocumentProviderTest.groovy index d0c38760e1..1926f0ff77 100644 --- 
a/src/test/groovy/graphql/execution/preparsed/NoOpPreparsedDocumentProviderTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/NoOpPreparsedDocumentProviderTest.groovy @@ -13,10 +13,10 @@ class NoOpPreparsedDocumentProviderTest extends Specification { def documentEntry = new PreparsedDocumentEntry(Document.newDocument().build()) when: - def actual = provider.getDocument(newExecutionInput("{}").build(), { return documentEntry }) + def actual = provider.getDocumentAsync(newExecutionInput("{}").build(), { return documentEntry }) then: - actual == documentEntry + actual.join() == documentEntry } } diff --git a/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentEntryTest.groovy b/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentEntryTest.groovy index 96e5d0f2f3..4cfa7d0e15 100644 --- a/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentEntryTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentEntryTest.groovy @@ -33,7 +33,7 @@ class PreparsedDocumentEntryTest extends Specification { def "Ensure a non-null errors returns"() { given: def errors = [new InvalidSyntaxError(new SourceLocation(0, 0), "bang"), - new ValidationError(ValidationErrorType.InvalidSyntax)] + ValidationError.newValidationError().validationErrorType(ValidationErrorType.InvalidSyntax).build()] when: def docEntry = new PreparsedDocumentEntry(errors) diff --git a/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentProviderTest.groovy b/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentProviderTest.groovy index 104d00ea28..fc1eb054f1 100644 --- a/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentProviderTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/PreparsedDocumentProviderTest.groovy @@ -11,9 +11,9 @@ import graphql.execution.instrumentation.LegacyTestingInstrumentation import graphql.execution.instrumentation.SimplePerformantInstrumentation import 
graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters import graphql.language.Document -import graphql.parser.Parser import spock.lang.Specification +import java.util.concurrent.CompletableFuture import java.util.function.Function import static graphql.ExecutionInput.newExecutionInput @@ -39,7 +39,7 @@ class PreparsedDocumentProviderTest extends Specification { "end:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", @@ -48,7 +48,7 @@ class PreparsedDocumentProviderTest extends Specification { "end:complete-id", "end:field-id", - "end:execution-strategy", + "end:execute-object", "end:complete-hero", "end:field-hero", @@ -80,7 +80,7 @@ class PreparsedDocumentProviderTest extends Specification { "end:fetch-hero", "start:complete-hero", - "start:execution-strategy", + "start:execute-object", "start:field-id", "start:fetch-id", @@ -89,7 +89,7 @@ class PreparsedDocumentProviderTest extends Specification { "end:complete-id", "end:field-id", - "end:execution-strategy", + "end:execute-object", "end:complete-hero", "end:field-hero", @@ -193,13 +193,13 @@ class PreparsedDocumentProviderTest extends Specification { def documentProvider = new PreparsedDocumentProvider() { @Override - PreparsedDocumentEntry getDocument(ExecutionInput executionInput, Function parseAndValidateFunction) { + CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { if (executionInput.getQuery() == "#A") { executionInput = executionInput.transform({ it.query(queryA) }) } else { executionInput = executionInput.transform({ it.query(queryB) }) } - return parseAndValidateFunction.apply(executionInput) + return CompletableFuture.completedFuture(parseAndValidateFunction.apply(executionInput)) } } @@ -225,28 +225,4 @@ class PreparsedDocumentProviderTest extends Specification { resultB.data == [hero: [name: "R2-D2"]] instrumentationB.capturedInput.getQuery() == 
queryB } - - def "sync method and async method result is same"() { - given: - def provider = new TestingPreparsedDocumentProvider() - def queryA = """ - query A { - hero { - id - } - } - """ - def engineParser = { - ExecutionInput ei -> - def doc = new Parser().parseDocument(ei.getQuery()) - return new PreparsedDocumentEntry(doc) - } - when: - def syncMethod = provider.getDocument(newExecutionInput(queryA).build(), engineParser) - def asyncMethod = provider.getDocumentAsync(newExecutionInput(queryA).build(), engineParser) - - then: - asyncMethod != null - asyncMethod.get().equals(syncMethod) - } } diff --git a/src/test/groovy/graphql/execution/preparsed/TestingPreparsedDocumentProvider.groovy b/src/test/groovy/graphql/execution/preparsed/TestingPreparsedDocumentProvider.groovy index 17f4cf341e..30ae3d4f63 100644 --- a/src/test/groovy/graphql/execution/preparsed/TestingPreparsedDocumentProvider.groovy +++ b/src/test/groovy/graphql/execution/preparsed/TestingPreparsedDocumentProvider.groovy @@ -2,6 +2,7 @@ package graphql.execution.preparsed import graphql.ExecutionInput +import java.util.concurrent.CompletableFuture import java.util.function.Function @@ -9,9 +10,9 @@ class TestingPreparsedDocumentProvider implements PreparsedDocumentProvider { private Map cache = new HashMap<>() @Override - PreparsedDocumentEntry getDocument(ExecutionInput executionInput, Function parseAndValidateFunction) { + CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { Function mapCompute = { key -> parseAndValidateFunction.apply(executionInput) } - return cache.computeIfAbsent(executionInput.query, mapCompute) + return CompletableFuture.completedFuture(cache.computeIfAbsent(executionInput.query, mapCompute)) } } diff --git a/src/test/groovy/graphql/execution/preparsed/persisted/ApolloPersistedQuerySupportTest.groovy b/src/test/groovy/graphql/execution/preparsed/persisted/ApolloPersistedQuerySupportTest.groovy index b9f4d49883..7b4a4cc168 
100644 --- a/src/test/groovy/graphql/execution/preparsed/persisted/ApolloPersistedQuerySupportTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/persisted/ApolloPersistedQuerySupportTest.groovy @@ -5,6 +5,7 @@ import graphql.execution.preparsed.PreparsedDocumentEntry import graphql.parser.Parser import spock.lang.Specification +import java.util.concurrent.CompletableFuture import java.util.function.Function import static graphql.execution.preparsed.persisted.PersistedQuerySupport.PERSISTED_QUERY_MARKER @@ -29,11 +30,11 @@ class ApolloPersistedQuerySupportTest extends Specification { def parseCount = [:] @Override - PreparsedDocumentEntry getPersistedQueryDocument(Object persistedQueryId, ExecutionInput executionInput, PersistedQueryCacheMiss onCacheMiss) throws PersistedQueryNotFound { + CompletableFuture getPersistedQueryDocumentAsync(Object persistedQueryId, ExecutionInput executionInput, PersistedQueryCacheMiss onCacheMiss) throws PersistedQueryNotFound { keyCount.compute(persistedQueryId, { k, v -> v == null ? 1 : v + 1 }) PreparsedDocumentEntry entry = map.get(persistedQueryId) as PreparsedDocumentEntry if (entry != null) { - return entry + return CompletableFuture.completedFuture(entry) } parseCount.compute(persistedQueryId, { k, v -> v == null ? 
1 : v + 1 }) @@ -44,7 +45,7 @@ class ApolloPersistedQuerySupportTest extends Specification { } def newDocEntry = onCacheMiss.apply(queryText) map.put(persistedQueryId, newDocEntry) - return newDocEntry + return CompletableFuture.completedFuture(newDocEntry) } } @@ -66,7 +67,7 @@ class ApolloPersistedQuerySupportTest extends Specification { when: def ei = mkEI(hashOne, PERSISTED_QUERY_MARKER) - def documentEntry = apolloSupport.getDocument(ei, engineParser) + def documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() def doc = documentEntry.getDocument() then: printAstCompact(doc) == "{oneTwoThree}" @@ -75,7 +76,7 @@ class ApolloPersistedQuerySupportTest extends Specification { when: ei = mkEI(hashOne, PERSISTED_QUERY_MARKER) - documentEntry = apolloSupport.getDocument(ei, engineParser) + documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() doc = documentEntry.getDocument() then: @@ -91,7 +92,7 @@ class ApolloPersistedQuerySupportTest extends Specification { when: def ei = ExecutionInput.newExecutionInput("query { normal }").build() - def documentEntry = apolloSupport.getDocument(ei, engineParser) + def documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() def doc = documentEntry.getDocument() then: printAstCompact(doc) == "{normal}" @@ -105,7 +106,7 @@ class ApolloPersistedQuerySupportTest extends Specification { when: def ei = mkEI(hashOne, "{normal}") - def documentEntry = apolloSupport.getDocument(ei, engineParser) + def documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() def doc = documentEntry.getDocument() then: printAstCompact(doc) == "{oneTwoThree}" @@ -121,7 +122,7 @@ class ApolloPersistedQuerySupportTest extends Specification { when: def ei = mkEI("nonExistedHash", PERSISTED_QUERY_MARKER) - def documentEntry = apolloSupport.getDocument(ei, engineParser) + def documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() then: documentEntry.getDocument() == null def 
gqlError = documentEntry.getErrors()[0] @@ -137,21 +138,21 @@ class ApolloPersistedQuerySupportTest extends Specification { when: def ei = mkEI(hashOne, PERSISTED_QUERY_MARKER) - def documentEntry = apolloSupport.getDocument(ei, engineParser) + def documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() def doc = documentEntry.getDocument() then: printAstCompact(doc) == "{oneTwoThree}" when: ei = mkEI(hashTwo, PERSISTED_QUERY_MARKER) - documentEntry = apolloSupport.getDocument(ei, engineParser) + documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() doc = documentEntry.getDocument() then: printAstCompact(doc) == "{fourFiveSix}" when: ei = mkEI("nonExistent", PERSISTED_QUERY_MARKER) - documentEntry = apolloSupport.getDocument(ei, engineParser) + documentEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() then: documentEntry.hasErrors() } @@ -161,7 +162,7 @@ class ApolloPersistedQuerySupportTest extends Specification { def apolloSupport = new ApolloPersistedQuerySupport(cache) when: def ei = mkEI("badHash", PERSISTED_QUERY_MARKER) - def docEntry = apolloSupport.getDocument(ei, engineParser) + def docEntry = apolloSupport.getDocumentAsync(ei, engineParser).join() then: docEntry.getDocument() == null def error = docEntry.getErrors()[0] diff --git a/src/test/groovy/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCacheTest.groovy b/src/test/groovy/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCacheTest.groovy index 88b9803de5..bffc4e7d9c 100644 --- a/src/test/groovy/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCacheTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/persisted/InMemoryPersistedQueryCacheTest.groovy @@ -37,7 +37,7 @@ class InMemoryPersistedQueryCacheTest extends Specification { def ei = mkEI(hash, "query { oneTwoThreeFour }") when: - def getDoc = inMemCache.getPersistedQueryDocument(hash, ei, onMiss) + def getDoc = 
inMemCache.getPersistedQueryDocumentAsync(hash, ei, onMiss).join() def doc = getDoc.document then: printAstCompact(doc) == "{oneTwoThreeFour}" @@ -50,7 +50,7 @@ class InMemoryPersistedQueryCacheTest extends Specification { .build() def ei = mkEI(hash, PersistedQuerySupport.PERSISTED_QUERY_MARKER) when: - def getDoc = inMemCache.getPersistedQueryDocument(hash, ei, onMiss) + def getDoc = inMemCache.getPersistedQueryDocumentAsync(hash, ei, onMiss).join() def doc = getDoc.document then: printAstCompact(doc) == "{foo bar baz}" diff --git a/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java b/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java index e6c3de62d4..f736807941 100644 --- a/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java +++ b/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java @@ -55,4 +55,9 @@ public Throwable getThrowable() { public AtomicBoolean isDone() { return done; } + + public Subscription getSubscription() { + return subscription; + } + } diff --git a/src/test/groovy/graphql/execution/values/InputInterceptorTest.groovy b/src/test/groovy/graphql/execution/values/InputInterceptorTest.groovy index 037dd27486..de6914a50d 100644 --- a/src/test/groovy/graphql/execution/values/InputInterceptorTest.groovy +++ b/src/test/groovy/graphql/execution/values/InputInterceptorTest.groovy @@ -134,6 +134,6 @@ class InputInterceptorTest extends Specification { then: !er.errors.isEmpty() - er.errors[0].message == "Variable 'booleanArg' has an invalid value: Expected a value that can be converted to type 'Boolean' but it was a 'LinkedHashMap'" + er.errors[0].message == "Variable 'booleanArg' has an invalid value: Expected a Boolean input, but it was a 'LinkedHashMap'" } } diff --git a/src/test/groovy/graphql/incremental/IncrementalExecutionResultTest.groovy b/src/test/groovy/graphql/incremental/IncrementalExecutionResultTest.groovy new file mode 100644 index 0000000000..25eb197ec5 --- /dev/null +++ 
b/src/test/groovy/graphql/incremental/IncrementalExecutionResultTest.groovy @@ -0,0 +1,85 @@ +package graphql.incremental + +import graphql.execution.ResultPath +import spock.lang.Specification + +import static graphql.incremental.DeferPayload.newDeferredItem +import static graphql.incremental.IncrementalExecutionResultImpl.newIncrementalExecutionResult +import static graphql.incremental.StreamPayload.newStreamedItem + +class IncrementalExecutionResultTest extends Specification { + + def "sanity test to check IncrementalExecutionResultImpl builder and item builders work"() { + when: + def defer1 = newDeferredItem() + .label("homeWorldDefer") + .path(ResultPath.parse("/person")) + .data([homeWorld: "Tatooine"]) + .build() + + def stream1 = newStreamedItem() + .label("filmsStream") + .path(ResultPath.parse("/person/films[1]")) + .items([[title: "The Empire Strikes Back"]]) + .build() + + def stream2 = newStreamedItem() + .label("filmsStream") + .path(ResultPath.parse("/person/films[2]")) + .items([[title: "Return of the Jedi"]]) + .build() + + def result = newIncrementalExecutionResult() + .data([ + person: [ + name : "Luke Skywalker", + films: [ + [title: "A New Hope"] + ] + ] + ]) + .hasNext(true) + .incremental([defer1, stream1, stream2]) + .extensions([some: "map"]) + .build() + + def toSpec = result.toSpecification() + + then: + toSpec == [ + data : [person: [name: "Luke Skywalker", films: [[title: "A New Hope"]]]], + extensions: [some: "map"], + hasNext : true, + incremental: [ + [path: ["person"], label: "homeWorldDefer", data: [homeWorld: "Tatooine"]], + [path: ["person", "films", 1], label: "filmsStream", items: [[title: "The Empire Strikes Back"]]], + [path: ["person", "films", 2], label: "filmsStream", items: [[title: "Return of the Jedi"]]], + ] + ] + + } + def "sanity test to check DelayedIncrementalPartialResult builder works"() { + when: + def deferredItem = newDeferredItem() + .label("homeWorld") + .path(ResultPath.parse("/person")) + 
.data([homeWorld: "Tatooine"]) + .build() + + def result = DelayedIncrementalPartialResultImpl.newIncrementalExecutionResult() + .incrementalItems([deferredItem]) + .hasNext(false) + .extensions([some: "map"]) + .build() + + def toSpec = result.toSpecification() + + then: + toSpec == [ + incremental: [[path: ["person"], label: "homeWorld", data: [homeWorld: "Tatooine"]]], + extensions: [some: "map"], + hasNext : false, + ] + + } +} diff --git a/src/test/groovy/graphql/introspection/GoodFaithIntrospectionTest.groovy b/src/test/groovy/graphql/introspection/GoodFaithIntrospectionTest.groovy new file mode 100644 index 0000000000..c2d9b2dc87 --- /dev/null +++ b/src/test/groovy/graphql/introspection/GoodFaithIntrospectionTest.groovy @@ -0,0 +1,231 @@ +package graphql.introspection + +import graphql.ExecutionInput +import graphql.ExecutionResult +import graphql.TestUtil +import graphql.execution.CoercedVariables +import graphql.language.Document +import graphql.normalized.ExecutableNormalizedOperationFactory +import spock.lang.Specification + +class GoodFaithIntrospectionTest extends Specification { + + def graphql = TestUtil.graphQL("type Query { normalField : String }").build() + + def setup() { + GoodFaithIntrospection.enabledJvmWide(true) + } + + def cleanup() { + GoodFaithIntrospection.enabledJvmWide(true) + } + + def "standard introspection query is inside limits just in general"() { + + when: + Document document = TestUtil.toDocument(IntrospectionQuery.INTROSPECTION_QUERY) + def eno = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphql.getGraphQLSchema(), document, + "IntrospectionQuery", CoercedVariables.emptyVariables()) + + then: + eno.getOperationFieldCount() < GoodFaithIntrospection.GOOD_FAITH_MAX_FIELDS_COUNT // currently 189 + eno.getOperationDepth() < GoodFaithIntrospection.GOOD_FAITH_MAX_DEPTH_COUNT // currently 13 + } + + def "test asking for introspection in good faith"() { + + when: + ExecutionResult er = 
graphql.execute(IntrospectionQuery.INTROSPECTION_QUERY) + then: + er.errors.isEmpty() + } + + def "test asking for introspection in bad faith"() { + + when: + ExecutionResult er = graphql.execute(query) + then: + !er.errors.isEmpty() + er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError + + where: + query | _ + // long attack + """ + query badActor{__schema{types{fields{type{fields{type{fields{type{fields{type{name}}}}}}}}}}} + """ | _ + // a case for __Type interfaces + """ query badActor { + __schema { types { interfaces { fields { type { interfaces { name } } } } } } + } + """ | _ + // a case for __Type inputFields + """ query badActor { + __schema { types { inputFields { type { inputFields { name }}}}} + } + """ | _ + // a case for __Type possibleTypes + """ query badActor { + __schema { types { inputFields { type { inputFields { name }}}}} + } + """ | _ + // a case leading from __InputValue + """ query badActor { + __schema { types { fields { args { type { name fields { name }}}}}} + } + """ | _ + // a case leading from __Field + """ query badActor { + __schema { types { fields { type { name fields { name }}}}} + } + """ | _ + // a case for __type + """ query badActor { + __type(name : "t") { name } + alias1 : __type(name : "t1") { name } + } + """ | _ + // a case for __type with aliases + """ query badActor { + a1: __type(name : "t") { name } + a2 : __type(name : "t1") { name } + } + """ | _ + // a case for schema repeated - dont ask twice + """ query badActor { + __schema { types { name} } + alias1 : __schema { types { name} } + } + """ | _ + // a case for used aliases + """ query badActor { + a1: __schema { types { name} } + a2 : __schema { types { name} } + } + """ | _ + + } + + def "mixed general queries and introspections will be stopped anyway"() { + def query = """ + query goodAndBad { + normalField + __schema{types{fields{type{fields{type{fields{type{fields{type{name}}}}}}}}}} + } + """ + + when: + ExecutionResult er = 
graphql.execute(query) + then: + !er.errors.isEmpty() + er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError + er.data == null // it stopped hard - it did not continue to normal business + } + + def "can be disabled"() { + when: + def currentState = GoodFaithIntrospection.isEnabledJvmWide() + + then: + currentState + + when: + def prevState = GoodFaithIntrospection.enabledJvmWide(false) + + then: + prevState + + when: + ExecutionResult er = graphql.execute("query badActor{__schema{types{fields{type{fields{type{fields{type{fields{type{name}}}}}}}}}}}") + + then: + er.errors.isEmpty() + } + + def "can be disabled per request"() { + when: + def context = [(GoodFaithIntrospection.GOOD_FAITH_INTROSPECTION_DISABLED): true] + ExecutionInput executionInput = ExecutionInput.newExecutionInput("query badActor{__schema{types{fields{type{fields{type{fields{type{fields{type{name}}}}}}}}}}}") + .graphQLContext(context).build() + ExecutionResult er = graphql.execute(executionInput) + + then: + er.errors.isEmpty() + + when: + context = [(GoodFaithIntrospection.GOOD_FAITH_INTROSPECTION_DISABLED): false] + executionInput = ExecutionInput.newExecutionInput("query badActor{__schema{types{fields{type{fields{type{fields{type{fields{type{name}}}}}}}}}}}") + .graphQLContext(context).build() + er = graphql.execute(executionInput) + + then: + !er.errors.isEmpty() + er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError + } + + def "can stop deep queries"() { + + when: + def query = createDeepQuery(depth) + def then = System.currentTimeMillis() + ExecutionResult er = graphql.execute(query) + def ms = System.currentTimeMillis() - then + + then: + !er.errors.isEmpty() + er.errors[0].class == targetError + er.data == null // it stopped hard - it did not continue to normal business + println "Took " + ms + "ms" + + where: + depth | targetError + 2 | GoodFaithIntrospection.BadFaithIntrospectionError.class + 10 | 
GoodFaithIntrospection.BadFaithIntrospectionError.class + 15 | GoodFaithIntrospection.BadFaithIntrospectionError.class + 20 | GoodFaithIntrospection.BadFaithIntrospectionError.class + 25 | GoodFaithIntrospection.BadFaithIntrospectionError.class + 50 | GoodFaithIntrospection.BadFaithIntrospectionError.class + 100 | GoodFaithIntrospection.BadFaithIntrospectionError.class + } + + String createDeepQuery(int depth = 25) { + def result = """ +query test { + __schema { + types { + ...F1 + } + } +} +""" + for (int i = 1; i < depth; i++) { + result += """ + fragment F$i on __Type { + fields { + type { + ...F${i + 1} + } + } + + ofType { + ...F${i + 1} + } +} + + +""" + } + result += """ + fragment F$depth on __Type { + fields { + type { +name + } + } +} + + +""" + return result + } +} diff --git a/src/test/groovy/graphql/introspection/IntrospectionTest.groovy b/src/test/groovy/graphql/introspection/IntrospectionTest.groovy index 62e27138d1..8a70c68618 100644 --- a/src/test/groovy/graphql/introspection/IntrospectionTest.groovy +++ b/src/test/groovy/graphql/introspection/IntrospectionTest.groovy @@ -1,7 +1,8 @@ package graphql.introspection - +import graphql.ExecutionInput import graphql.TestUtil +import graphql.execution.AsyncSerialExecutionStrategy import graphql.schema.DataFetcher import graphql.schema.FieldCoordinates import graphql.schema.GraphQLCodeRegistry @@ -22,6 +23,14 @@ import static graphql.schema.GraphQLSchema.newSchema class IntrospectionTest extends Specification { + def setup() { + Introspection.enabledJvmWide(true) + } + + def cleanup() { + Introspection.enabledJvmWide(true) + } + def "bug 1186 - introspection depth check"() { def spec = ''' type Query { @@ -547,7 +556,7 @@ class IntrospectionTest extends Specification { then: def oldQuery = oldIntrospectionQuery.replaceAll("\\s+", "") - def newQuery = newIntrospectionQuery.replaceAll("\\s+","") + def newQuery = newIntrospectionQuery.replaceAll("\\s+", "") oldQuery == newQuery } @@ -688,4 +697,111 @@ class 
IntrospectionTest extends Specification { queryType["isOneOf"] == null } + def "jvm wide enablement"() { + def graphQL = TestUtil.graphQL("type Query { f : String } ").build() + + when: + def er = graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY) + + then: + er.errors.isEmpty() + + when: + Introspection.enabledJvmWide(false) + er = graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY) + + then: + er.errors[0] instanceof IntrospectionDisabledError + er.errors[0].getErrorType().toString() == "IntrospectionDisabled" + + when: + Introspection.enabledJvmWide(true) + er = graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY) + + then: + er.errors.isEmpty() + } + + def "per request enablement"() { + def graphQL = TestUtil.graphQL("type Query { f : String } ").build() + + when: + // null context + def ei = ExecutionInput.newExecutionInput(IntrospectionQuery.INTROSPECTION_QUERY) + .build() + def er = graphQL.execute(ei) + + then: + er.errors.isEmpty() + + when: + ei = ExecutionInput.newExecutionInput(IntrospectionQuery.INTROSPECTION_QUERY) + .graphQLContext(Map.of(Introspection.INTROSPECTION_DISABLED, false)).build() + er = graphQL.execute(ei) + + then: + er.errors.isEmpty() + + when: + ei = ExecutionInput.newExecutionInput(IntrospectionQuery.INTROSPECTION_QUERY) + .graphQLContext(Map.of(Introspection.INTROSPECTION_DISABLED, true)).build() + er = graphQL.execute(ei) + + then: + er.errors[0] instanceof IntrospectionDisabledError + er.errors[0].getErrorType().toString() == "IntrospectionDisabled" + } + + def "mixed schema and other fields stop early"() { + def graphQL = TestUtil.graphQL("type Query { normalField : String } ").build() + + def query = """ + query goodAndBad { + normalField + __schema{ types{ fields { name }}} + } + """ + + when: + def er = graphQL.execute(query) + + then: + er.errors.isEmpty() + + when: + Introspection.enabledJvmWide(false) + er = graphQL.execute(query) + + then: + er.errors[0] instanceof IntrospectionDisabledError + 
er.errors[0].getErrorType().toString() == "IntrospectionDisabled" + er.data == null // stops hard + } + + def "AsyncSerialExecutionStrategy with jvm wide enablement"() { + def graphQL = TestUtil.graphQL("type Query { f : String } ") + .queryExecutionStrategy(new AsyncSerialExecutionStrategy()).build() + + when: + def er = graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY) + + then: + er.errors.isEmpty() + + when: + Introspection.enabledJvmWide(false) + er = graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY) + + then: + er.errors[0] instanceof IntrospectionDisabledError + er.errors[0].getErrorType().toString() == "IntrospectionDisabled" + + when: + Introspection.enabledJvmWide(true) + er = graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY) + + then: + er.errors.isEmpty() + } + } diff --git a/src/test/groovy/graphql/language/SerialisationTest.groovy b/src/test/groovy/graphql/language/SerialisationTest.groovy index 9a78e90913..8bd4ae46af 100644 --- a/src/test/groovy/graphql/language/SerialisationTest.groovy +++ b/src/test/groovy/graphql/language/SerialisationTest.groovy @@ -112,7 +112,11 @@ class SerialisationTest extends Specification { when: GraphQLError syntaxError1 = new InvalidSyntaxError(srcLoc(1, 1), "Bad Syntax 1") - GraphQLError validationError2 = new ValidationError(ValidationErrorType.FieldUndefined, srcLoc(2, 2), "Bad Query 2") + GraphQLError validationError2 = ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.FieldUndefined) + .sourceLocation(srcLoc(2, 2)) + .description("Bad Query 2") + .build() def originalEntry = new PreparsedDocumentEntry([syntaxError1, validationError2]) PreparsedDocumentEntry newEntry = serialisedDownAndBack(originalEntry) @@ -146,7 +150,11 @@ class SerialisationTest extends Specification { Document originalDoc = TestUtil.parseQuery(query) GraphQLError syntaxError1 = new InvalidSyntaxError(srcLoc(1, 1), "Bad Syntax 1") - GraphQLError validationError2 = new 
ValidationError(ValidationErrorType.FieldUndefined, srcLoc(2, 2), "Bad Query 2") + GraphQLError validationError2 = ValidationError.newValidationError() + .validationErrorType(ValidationErrorType.FieldUndefined) + .sourceLocation(srcLoc(2, 2)) + .description("Bad Query 2") + .build() def originalEntry = new PreparsedDocumentEntry(originalDoc, [syntaxError1, validationError2]) def originalAst = AstPrinter.printAst(originalEntry.getDocument()) PreparsedDocumentEntry newEntry = serialisedDownAndBack(originalEntry) diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedFieldTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedFieldTest.groovy index dc3db5daa4..6debbb2ff2 100644 --- a/src/test/groovy/graphql/normalized/ExecutableNormalizedFieldTest.groovy +++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedFieldTest.groovy @@ -48,8 +48,7 @@ class ExecutableNormalizedFieldTest extends Specification { """ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory normalizedOperationFactory = new ExecutableNormalizedOperationFactory() - def normalizedOperation = normalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def normalizedOperation = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def pets = normalizedOperation.getTopLevelFields()[0] def allChildren = pets.getChildren() diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryDeferTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryDeferTest.groovy new file mode 100644 index 0000000000..f861fcf222 --- /dev/null +++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryDeferTest.groovy @@ -0,0 +1,908 @@ +package graphql.normalized + +import graphql.AssertException +import graphql.ExecutionInput +import 
graphql.GraphQL +import graphql.TestUtil +import graphql.execution.RawVariables +import graphql.language.Document +import graphql.schema.GraphQLSchema +import graphql.util.TraversalControl +import graphql.util.Traverser +import graphql.util.TraverserContext +import graphql.util.TraverserVisitorStub +import spock.lang.Specification + +class ExecutableNormalizedOperationFactoryDeferTest extends Specification { + String schema = """ + directive @defer(if: Boolean, label: String) on FRAGMENT_SPREAD | INLINE_FRAGMENT + + type Query { + dog: Dog + animal: Animal + mammal: Mammal + } + + interface LivingThing { + age: Int + } + + interface Animal implements LivingThing { + name: String + age: Int + } + + type Dog implements Animal & LivingThing { + name: String + age: Int + breed: String + owner: Person + } + + type Cat implements Animal & LivingThing { + name: String + age: Int + breed: String + color: String + siblings: [Cat] + } + + type Fish implements Animal & LivingThing { + name: String + age: Int + } + + type Person { + firstname: String + lastname: String + bestFriend: Person + } + + union Mammal = Dog | Cat + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + def "defer on a single field via inline fragment without type"() { + given: + + String query = ''' + query q { + dog { + name + ... @defer(label: "breed-defer") { + breed + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name', + 'Dog.breed defer{[label=breed-defer;types=[Dog]]}', + ] + } + + def "fragment on interface field with no type"() { + given: + + String query = ''' + query q { + animal { + ... 
@defer { + name + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + "[Cat, Dog, Fish].name defer{[label=null;types=[Cat, Dog, Fish]]}", + ] + } + + def "fragments on non-conditional fields"() { + given: + + String query = ''' + query q { + animal { + ... on Cat @defer { + name + } + ... on Dog @defer { + name + } + ... on Animal @defer { + name + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + "[Cat, Dog, Fish].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]],[label=null;types=[Cat, Dog, Fish]]}", + ] + } + + def "fragments on subset of non-conditional fields"() { + given: + + String query = ''' + query q { + animal { + ... on Cat @defer { + name + } + ... on Dog @defer { + name + } + ... on Fish { + name + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + "[Cat, Dog, Fish].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]]}", + ] + } + + def "field on multiple defer declarations is associated with multiple deferred executions"() { + given: + String query = ''' + query q { + dog { + ... @defer { + name + age + } + ... 
@defer { + age + } + } + } + ''' + Map variables = [:] + + when: + def executableNormalizedOperation = createExecutableNormalizedOperations(query, variables); + + List printedTree = printTreeWithIncrementalExecutionDetails(executableNormalizedOperation) + + then: + + def nameField = findField(executableNormalizedOperation, "Dog", "name") + def ageField = findField(executableNormalizedOperation, "Dog", "age") + + nameField.deferredExecutions.size() == 1 + ageField.deferredExecutions.size() == 2 + + // age field is associated with 2 defer executions, one of them is shared with "name", the other isn't + ageField.deferredExecutions.any { + it == nameField.deferredExecutions[0] + } + + ageField.deferredExecutions.any { + it != nameField.deferredExecutions[0] + } + + printedTree == ['Query.dog', + "Dog.name defer{[label=null;types=[Dog]]}", + "Dog.age defer{[label=null;types=[Dog]],[label=null;types=[Dog]]}", + ] + } + + def "fragment on interface"() { + given: + + String query = ''' + query q { + animal { + ... on Animal @defer { + name + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + '[Cat, Dog, Fish].name defer{[label=null;types=[Cat, Dog, Fish]]}', + ] + } + + def "fragment on distant interface"() { + given: + + String query = ''' + query q { + animal { + ... on LivingThing @defer { + age + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + '[Cat, Dog, Fish].age defer{[label=null;types=[Cat, Dog, Fish]]}', + ] + } + + def "fragment on union"() { + given: + + String query = ''' + query q { + mammal { + ... on Dog @defer { + name + breed + } + ... 
on Cat @defer { + name + breed + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.mammal', + '[Dog, Cat].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]]}', + 'Dog.breed defer{[label=null;types=[Dog]]}', + 'Cat.breed defer{[label=null;types=[Cat]]}', + ] + } + + def "fragments on interface"() { + given: + + String query = ''' + query q { + animal { + ... on Animal @defer { + name + } + ... on Animal @defer { + age + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + '[Cat, Dog, Fish].name defer{[label=null;types=[Cat, Dog, Fish]]}', + '[Cat, Dog, Fish].age defer{[label=null;types=[Cat, Dog, Fish]]}', + ] + } + + def "defer on a subselection of non-conditional fields"() { + given: + + String query = ''' + query q { + animal { + ... on Cat @defer { + name + } + ... on Dog { + name + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + '[Cat, Dog].name defer{[label=null;types=[Cat]]}', + ] + } + + def "fragments on conditional fields"() { + given: + + String query = ''' + query q { + animal { + ... on Cat @defer { + breed + } + ... on Dog @defer { + breed + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + 'Cat.breed defer{[label=null;types=[Cat]]}', + 'Dog.breed defer{[label=null;types=[Dog]]}' + ] + } + + def "defer on a single field via inline fragment with type"() { + given: + + String query = ''' + query q { + dog { + name + ... 
on Dog @defer(label: "breed-defer") { + breed + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name', + 'Dog.breed defer{[label=breed-defer;types=[Dog]]}', + ] + } + + def "1 defer on 2 fields"() { + given: + String query = ''' + query q { + animal { + ... @defer { + name + } + + ... on Dog @defer { + name + breed + } + + ... on Cat @defer { + name + breed + } + } + } + ''' + + Map variables = [:] + + when: + def executableNormalizedOperation = createExecutableNormalizedOperations(query, variables); + + List printedTree = printTreeWithIncrementalExecutionDetails(executableNormalizedOperation) + + then: "should result in the same instance of defer block" + def nameField = findField(executableNormalizedOperation,"[Cat, Dog, Fish]","name") + def dogBreedField = findField(executableNormalizedOperation, "Dog", "breed") + def catBreedField = findField(executableNormalizedOperation, "Cat", "breed") + + nameField.deferredExecutions.size() == 3 + dogBreedField.deferredExecutions.size() == 1 + catBreedField.deferredExecutions.size() == 1 + + // nameField should share a defer block with each of the other fields + nameField.deferredExecutions.any { + it == dogBreedField.deferredExecutions[0] + } + nameField.deferredExecutions.any { + it == catBreedField.deferredExecutions[0] + } + // also, nameField should have a defer block that is not shared with any other field + nameField.deferredExecutions.any { + it != dogBreedField.deferredExecutions[0] && + it != catBreedField.deferredExecutions[0] + } + + printedTree == ['Query.animal', + '[Cat, Dog, Fish].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]],[label=null;types=[Cat, Dog, Fish]]}', + 'Dog.breed defer{[label=null;types=[Dog]]}', + 'Cat.breed defer{[label=null;types=[Cat]]}', + ] + } + + def "2 defers on 2 fields"() { + given: + + String query = ''' + query q { + dog { + ... @defer{ + name + } + ... 
@defer{ + breed + } + } + } + ''' + + Map variables = [:] + + when: + def executableNormalizedOperation = createExecutableNormalizedOperations(query, variables); + + List printedTree = printTreeWithIncrementalExecutionDetails(executableNormalizedOperation) + + then: "should result in 2 different instances of defer" + def nameField = findField(executableNormalizedOperation, "Dog", "name") + def breedField = findField(executableNormalizedOperation, "Dog", "breed") + + nameField.deferredExecutions.size() == 1 + breedField.deferredExecutions.size() == 1 + + // different label instances + nameField.deferredExecutions[0] != breedField.deferredExecutions[0] + + printedTree == ['Query.dog', + 'Dog.name defer{[label=null;types=[Dog]]}', + 'Dog.breed defer{[label=null;types=[Dog]]}', + ] + } + + def "defer on a fragment definition"() { + given: + + String query = ''' + query q { + dog { + ... DogFrag @defer(label: "breed-defer") + } + } + + fragment DogFrag on Dog { + name + breed + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=breed-defer;types=[Dog]]}', + 'Dog.breed defer{[label=breed-defer;types=[Dog]]}', + ] + } + + def "multiple defer on same field with different labels"() { + given: + + String query = ''' + query q { + dog { + ... @defer(label: "name-defer") { + name + } + + ... @defer(label: "another-name-defer") { + name + } + } + } + + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=another-name-defer;types=[Dog]],[label=name-defer;types=[Dog]]}' + ] + } + + def "multiple fields and a single defer"() { + given: + + String query = ''' + query q { + dog { + ... @defer(label: "name-defer") { + name + } + + ... 
{ + name + } + } + } + + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=name-defer;types=[Dog]]}', + ] + } + + def "multiple fields and a single defer - no label"() { + given: + + String query = ''' + query q { + dog { + ... @defer { + name + } + + ... { + name + } + } + } + + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=null;types=[Dog]]}', + ] + } + + def "multiple fields and multiple defers - no label"() { + given: + + String query = ''' + query q { + dog { + ... @defer { + name + } + + ... @defer { + name + } + } + } + + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=null;types=[Dog]],[label=null;types=[Dog]]}', + ] + } + + def "nested defers - no label"() { + given: + + String query = ''' + query q { + dog { + ... @defer { + name + owner { + firstname + ... @defer { + lastname + } + } + } + } + } + + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=null;types=[Dog]]}', + 'Dog.owner defer{[label=null;types=[Dog]]}', + 'Person.firstname', + 'Person.lastname defer{[label=null;types=[Person]]}', + ] + } + + def "nested defers - with labels"() { + given: + + String query = ''' + query q { + dog { + ... @defer(label:"dog-defer") { + name + owner { + firstname + ... 
@defer(label: "lastname-defer") { + lastname + } + } + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=dog-defer;types=[Dog]]}', + 'Dog.owner defer{[label=dog-defer;types=[Dog]]}', + 'Person.firstname', + 'Person.lastname defer{[label=lastname-defer;types=[Person]]}', + ] + } + + def "nested defers - with named spreads"() { + given: + + String query = ''' + query q { + animal { + name + ... on Dog @defer(label:"dog-defer") { + owner { + firstname + ... @defer(label: "lastname-defer") { + lastname + } + } + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.animal', + '[Cat, Dog, Fish].name', + 'Dog.owner defer{[label=dog-defer;types=[Dog]]}', + 'Person.firstname', + 'Person.lastname defer{[label=lastname-defer;types=[Person]]}', + ] + } + + def "nesting defer blocks that would always result in no data are ignored"() { + given: + + String query = ''' + query q { + dog { + ... @defer(label: "one") { + ... @defer(label: "two") { + ... @defer(label: "three") { + name + } + } + } + } + } + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=three;types=[Dog]]}', + ] + } + + def "'if' argument is respected"() { + given: + + String query = ''' + query q { + dog { + ... @defer(if: false, label: "name-defer") { + name + } + + ... 
@defer(if: true, label: "another-name-defer") { + name + } + } + } + + ''' + + Map variables = [:] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=another-name-defer;types=[Dog]]}', + ] + } + + def "'if' argument is respected when value is passed through variable"() { + given: + + String query = ''' + query q($if1: Boolean, $if2: Boolean) { + dog { + ... @defer(if: $if1, label: "name-defer") { + name + } + + ... @defer(if: $if2, label: "another-name-defer") { + name + } + } + } + + ''' + + Map variables = [if1: false, if2: true] + + when: + List printedTree = executeQueryAndPrintTree(query, variables) + + then: + printedTree == ['Query.dog', + 'Dog.name defer{[label=another-name-defer;types=[Dog]]}', + ] + } + + private ExecutableNormalizedOperation createExecutableNormalizedOperations(String query, Map variables) { + assertValidQuery(graphQLSchema, query, variables) + Document document = TestUtil.parseQuery(query) + ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + + return dependencyGraph.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.of(variables), + ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(true), + ) + } + + private List executeQueryAndPrintTree(String query, Map variables) { + assertValidQuery(graphQLSchema, query, variables) + Document document = TestUtil.parseQuery(query) + ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + + def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.of(variables), + ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(true), + ) + return printTreeWithIncrementalExecutionDetails(tree) + } + + private List 
printTreeWithIncrementalExecutionDetails(ExecutableNormalizedOperation queryExecutionTree) { + def result = [] + Traverser traverser = Traverser.depthFirst({ it.getChildren() }) + + traverser.traverse(queryExecutionTree.getTopLevelFields(), new TraverserVisitorStub() { + @Override + TraversalControl enter(TraverserContext context) { + ExecutableNormalizedField queryExecutionField = context.thisNode() + result << queryExecutionField.printDetails() + printDeferExecutionDetails(queryExecutionField) + return TraversalControl.CONTINUE + } + + String printDeferExecutionDetails(ExecutableNormalizedField field) { + def deferExecutions = field.deferredExecutions + if (deferExecutions == null || deferExecutions.isEmpty()) { + return "" + } + + def deferLabels = new ArrayList<>(deferExecutions) + .sort { it.label } + .sort { it.possibleTypes.collect {it.name} } + .collect { "[label=${it.label};types=${it.possibleTypes.collect{it.name}.sort()}]" } + .join(",") + + return " defer{${deferLabels}}" + } + }) + + result + } + + private static void assertValidQuery(GraphQLSchema graphQLSchema, String query, Map variables = [:]) { + GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build() + def ei = ExecutionInput.newExecutionInput(query).variables(variables).build() + assert graphQL.execute(ei).errors.size() == 0 + } + + private static ExecutableNormalizedField findField(ExecutableNormalizedOperation operation, String objectTypeNames, String fieldName) { + return operation.normalizedFieldToMergedField + .collect { it.key } + .find { it.fieldName == fieldName + && it.objectTypeNamesToString() == objectTypeNames} + } +} diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy index 89ea656b81..2b9f146721 100644 --- a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy +++ 
b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy @@ -3,10 +3,12 @@ package graphql.normalized import graphql.ExecutionInput import graphql.GraphQL import graphql.TestUtil +import graphql.execution.AbortExecutionException import graphql.execution.CoercedVariables import graphql.execution.MergedField import graphql.execution.RawVariables import graphql.execution.directives.QueryAppliedDirective +import graphql.introspection.IntrospectionQuery import graphql.language.Document import graphql.language.Field import graphql.language.FragmentDefinition @@ -27,7 +29,10 @@ import static graphql.language.AstPrinter.printAst import static graphql.parser.Parser.parseValue import static graphql.schema.FieldCoordinates.coordinates -class ExecutableNormalizedOperationFactoryTest extends Specification { +abstract class ExecutableNormalizedOperationFactoryTest extends Specification { + static boolean deferSupport + + def "test"() { String schema = """ type Query{ @@ -112,8 +117,7 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -198,8 +202,7 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = 
printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -278,8 +281,7 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -329,8 +331,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -372,8 +374,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -422,8 +424,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def 
printedTree = printTree(tree) expect: @@ -485,8 +487,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -531,8 +533,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -575,8 +577,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -619,8 +621,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree 
= printTree(tree) expect: @@ -651,8 +653,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -702,8 +704,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -752,8 +754,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -791,8 +793,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) 
def printedTree = printTree(tree) expect: @@ -835,8 +837,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -875,8 +877,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -923,8 +925,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -1026,8 +1028,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) def subFooField = (document.getDefinitions()[1] as FragmentDefinition).getSelectionSet().getSelections()[0] as Field - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = 
localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def fieldToNormalizedField = tree.getFieldToNormalizedField() expect: @@ -1069,8 +1071,8 @@ type Dog implements Animal{ def petsField = (document.getDefinitions()[0] as OperationDefinition).getSelectionSet().getSelections()[0] as Field def idField = petsField.getSelectionSet().getSelections()[0] as Field - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def fieldToNormalizedField = tree.getFieldToNormalizedField() @@ -1118,8 +1120,8 @@ type Dog implements Animal{ def schemaField = selections[2] as Field def typeField = selections[3] as Field - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def fieldToNormalizedField = tree.getFieldToNormalizedField() expect: @@ -1175,8 +1177,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -1218,8 +1220,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - 
ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -1246,8 +1248,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def normalizedFieldToMergedField = tree.getNormalizedFieldToMergedField() Traverser traverser = Traverser.depthFirst({ it.getChildren() }) List result = new ArrayList<>() @@ -1284,10 +1286,9 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() when: - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def coordinatesToNormalizedFields = tree.coordinatesToNormalizedFields then: @@ -1385,8 +1386,8 @@ schema { Document document = TestUtil.parseQuery(mutation) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = 
printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -1435,7 +1436,7 @@ schema { assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + def variables = [ var1: [bar: 123], var2: [foo: "foo", input2: [bar: 123]] @@ -1443,7 +1444,7 @@ schema { // the normalized arg value should be the same regardless of how the value was provided def expectedNormalizedArgValue = [foo: new NormalizedInputValue("String", parseValue('"foo"')), input2: new NormalizedInputValue("Input2", [bar: new NormalizedInputValue("Int", parseValue("123"))])] when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) def topLevelField = tree.getTopLevelFields().get(0) def secondField = topLevelField.getChildren().get(0) def arg1 = secondField.getNormalizedArgument("arg1") @@ -1482,9 +1483,9 @@ schema { assertValidQuery(graphQLSchema, query) def document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) then: def topLevelField = tree.getTopLevelFields().get(0) @@ -1517,13 +1518,13 @@ schema { assertValidQuery(graphQLSchema, query) def document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() + def variables = [ varIds : null, otherVar: null, ] when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + 
def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) then: def topLevelField = tree.getTopLevelFields().get(0) @@ -1573,9 +1574,9 @@ schema { ] assertValidQuery(graphQLSchema, query, variables) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) def topLevelField = tree.getTopLevelFields().get(0) def arg1 = topLevelField.getNormalizedArgument("arg1") def arg2 = topLevelField.getNormalizedArgument("arg2") @@ -1626,9 +1627,9 @@ schema { ] assertValidQuery(graphQLSchema, query, variables) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) def topLevelField = tree.getTopLevelFields().get(0) def arg1 = topLevelField.getNormalizedArgument("arg1") def arg2 = topLevelField.getNormalizedArgument("arg2") @@ -1681,9 +1682,9 @@ schema { ''' assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, 
RawVariables.emptyVariables()) then: tree.normalizedFieldToMergedField.size() == 3 @@ -1739,9 +1740,9 @@ schema { ''' assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) then: @@ -1787,9 +1788,9 @@ schema { ''' assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) then: @@ -1863,9 +1864,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -1927,9 +1928,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def 
tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -1984,9 +1985,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2059,9 +2060,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2121,9 +2122,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2163,9 +2164,9 @@ schema { ''' 
assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2206,9 +2207,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2249,9 +2250,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2324,9 +2325,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = 
localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2400,9 +2401,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2462,9 +2463,9 @@ schema { def variables = ["true": Boolean.TRUE, "false": Boolean.FALSE] assertValidQuery(graphQLSchema, query, variables) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) println String.join("\n", printTree(tree)) def printedTree = printTree(tree) @@ -2519,9 +2520,9 @@ schema { def variables = [:] assertValidQuery(graphQLSchema, query, variables) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) def printedTree = printTreeAndDirectives(tree) then: @@ -2584,8 +2585,8 @@ fragment personName on Person { 
Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -2637,8 +2638,8 @@ fragment personName on Person { Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -2684,8 +2685,8 @@ fragment personName on Person { Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -2875,4 +2876,356 @@ fragment personName on Person { then: noExceptionThrown() } + + def "big query exceeding fields count"() { + String schema = """ + type Query { + animal: Animal + } + interface Animal { + name: String + friends: [Friend] + } + union Pet = Dog | Cat + type Friend { + name: String + isBirdOwner: Boolean + isCatOwner: Boolean + pets: [Pet] + } + type Bird implements Animal { + name: String + friends: [Friend] + } + type Cat implements Animal { + name: String + friends: [Friend] + breed: String + } + type Dog implements 
Animal { + name: String + breed: String + friends: [Friend] + } + """ + + def garbageFields = IntStream.range(0, 1000) + .mapToObj { + """test_$it: friends { name }""" + } + .collect(Collectors.joining("\n")) + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ + { + animal { + name + otherName: name + ... on Animal { + name + } + ... on Cat { + name + friends { + ... on Friend { + isCatOwner + pets { + ... on Dog { + name + } + } + } + } + } + ... on Bird { + friends { + isBirdOwner + } + friends { + name + pets { + ... on Cat { + breed + } + } + } + } + ... on Dog { + name + } + $garbageFields + } + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(2013)) + + then: + def e = thrown(AbortExecutionException) + e.message == "Maximum field count exceeded. 2014 > 2013" + } + + def "small query exceeding fields count"() { + String schema = """ + type Query { + hello: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ {hello a1: hello}""" + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(1)) + + then: + def e = thrown(AbortExecutionException) + e.message == "Maximum field count exceeded. 
2 > 1" + + + } + + def "query not exceeding fields count"() { + String schema = """ + type Query { + dogs: [Dog] + } + type Dog { + name: String + breed: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ {dogs{name breed }}""" + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(3)) + + then: + notThrown(AbortExecutionException) + + + } + + def "query with meta fields exceeding fields count"() { + String schema = """ + type Query { + hello: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = IntrospectionQuery.INTROSPECTION_QUERY + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(188)) + println result.normalizedFieldToMergedField.size() + + then: + def e = thrown(AbortExecutionException) + e.message == "Maximum field count exceeded. 
189 > 188" + } + + def "can capture depth and field count"() { + String schema = """ + type Query { + foo: Foo + } + + type Foo { + stop : String + bar : Bar + } + + type Bar { + stop : String + foo : Foo + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = "{ foo { bar { foo { bar { foo { stop bar { stop }}}}}}}" + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables() + ) + + then: + result.getOperationDepth() == 7 + result.getOperationFieldCount() == 8 + } + + def "factory has a default max node count"() { + String schema = """ + type Query { + foo: Foo + } + type Foo { + foo: Foo + name: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = "{ foo { ...F1}} " + int fragmentCount = 12 + for (int i = 1; i < fragmentCount; i++) { + query += """ + fragment F$i on Foo { + foo { ...F${i + 1} } + a: foo{ ...F${i + 1} } + b: foo{ ...F${i + 1} } + } + """ + } + query += """ + fragment F$fragmentCount on Foo{ + name + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables() + ) + then: + def e = thrown(AbortExecutionException) + e.message == "Maximum field count exceeded. 
100001 > 100000" + } + + def "default max fields can be changed "() { + String schema = """ + type Query { + foo: Foo + } + type Foo { + foo: Foo + name: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = "{foo{foo{name}}} " + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + ExecutableNormalizedOperationFactory.Options.setDefaultOptions(ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(2)) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables() + ) + then: + def e = thrown(AbortExecutionException) + e.message == "Maximum field count exceeded. 3 > 2" + cleanup: + ExecutableNormalizedOperationFactory.Options.setDefaultOptions(ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(ExecutableNormalizedOperationFactory.Options.DEFAULT_MAX_FIELDS_COUNT)) + } + + + private static ExecutableNormalizedOperation localCreateExecutableNormalizedOperation( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + CoercedVariables coercedVariableValues + ) { + + def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(deferSupport) + + return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, operationName, coercedVariableValues, options) + } + + private static ExecutableNormalizedOperation localCreateExecutableNormalizedOperationWithRawVariables( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + RawVariables rawVariables + ) { + + def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(deferSupport) + + return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + operationName, + rawVariables, + 
options + ) + } +} + +class ExecutableNormalizedOperationFactoryTestWithDeferSupport extends ExecutableNormalizedOperationFactoryTest { + static { + deferSupport = true + } +} + +class ExecutableNormalizedOperationFactoryTestNoDeferSupport extends ExecutableNormalizedOperationFactoryTest { + static { + deferSupport = false + } } diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerDeferTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerDeferTest.groovy new file mode 100644 index 0000000000..13928fd991 --- /dev/null +++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerDeferTest.groovy @@ -0,0 +1,525 @@ +package graphql.normalized + + +import graphql.GraphQL +import graphql.TestUtil +import graphql.execution.RawVariables +import graphql.language.AstPrinter +import graphql.language.AstSorter +import graphql.language.Document +import graphql.schema.GraphQLSchema +import graphql.schema.idl.RuntimeWiring +import graphql.schema.idl.TestLiveMockedWiringFactory +import graphql.schema.scalars.JsonScalar +import spock.lang.Specification + +import static graphql.ExecutionInput.newExecutionInput +import static graphql.language.OperationDefinition.Operation.QUERY +import static graphql.normalized.ExecutableNormalizedOperationToAstCompiler.compileToDocumentWithDeferSupport + +class ExecutableNormalizedOperationToAstCompilerDeferTest extends Specification { + VariablePredicate noVariables = new VariablePredicate() { + @Override + boolean shouldMakeVariable(ExecutableNormalizedField executableNormalizedField, String argName, NormalizedInputValue normalizedInputValue) { + return false + } + } + + String sdl = """ + directive @defer(if: Boolean, label: String) on FRAGMENT_SPREAD | INLINE_FRAGMENT + + type Query { + dog: Dog + animal: Animal + } + + interface Animal { + name: String + } + + type Dog implements Animal { + name: String + breed: String + owner: Person + } + + type 
Cat implements Animal { + name: String + breed: String + color: String + siblings: [Cat] + } + + type Fish implements Animal { + name: String + } + + type Person { + firstname: String + lastname: String + bestFriend: Person + } + """ + + def "simple defer"() { + String query = """ + query q { + dog { + name + ... @defer(label: "breed-defer") { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + dog { + name + ... @defer(label: "breed-defer") { + breed + } + } +} +''' + } + + def "simple defer with named spread"() { + String query = """ + query q { + dog { + name + ... on Dog @defer(label: "breed-defer") { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + dog { + name + ... @defer(label: "breed-defer") { + breed + } + } +} +''' + } + + def "multiple labels on the same field"() { + String query = """ + query q { + dog { + name + ... @defer(label: "breed-defer") { + breed + } + ... @defer(label: "breed-defer-2") { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + dog { + name + ... @defer(label: "breed-defer") { + breed + } + ... 
@defer(label: "breed-defer-2") { + breed + } + } +} +''' + } + + def "multiple defers without label on the same field"() { + String query = """ + query q { + dog { + name + ... @defer { + breed + } + ... @defer { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + dog { + name + ... @defer { + breed + } + ... @defer { + breed + } + } +} +''' + } + + def "field with and without defer"() { + String query = """ + query q { + dog { + ... @defer { + breed + } + ... { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + dog { + ... @defer { + breed + } + } +} +''' + } + + def "defer on type spread"() { + String query = """ + query q { + animal { + ... on Dog @defer { + breed + } + ... on Dog { + name + } + ... on Dog @defer(label: "owner-defer") { + owner { + firstname + } + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + animal { + ... on Dog @defer { + breed + } + ... on Dog { + name + } + ... on Dog @defer(label: "owner-defer") { + owner { + firstname + } + } + } +} +''' + } + + def "2 fragments on non-conditional fields"() { + String query = """ + query q { + animal { + ... on Cat @defer { + name + } + ... 
on Animal @defer { + name + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + animal { + ... @defer { + name + } + ... @defer { + name + } + } +} +''' + } + + def "2 fragments on conditional fields"() { + String query = """ + query q { + animal { + ... on Cat @defer { + breed + } + ... on Dog @defer { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + animal { + ... on Cat @defer { + breed + } + ... on Dog @defer { + breed + } + } +} +''' + } + + def "2 fragments on conditional fields with different labels"() { + String query = """ + query q { + animal { + ... on Cat @defer(label: "cat-defer") { + breed + } + ... on Dog @defer(label: "dog-defer") { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + animal { + ... on Cat @defer(label: "cat-defer") { + breed + } + ... on Dog @defer(label: "dog-defer") { + breed + } + } +} +''' + } + + def "fragments on conditional fields with different labels and repeating types"() { + String query = """ + query q { + animal { + ... on Cat @defer(label: "cat-defer-1") { + breed + } + ... on Cat @defer(label: "cat-defer-2") { + breed + } + ... 
on Dog @defer(label: "dog-defer") { + breed + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + animal { + ... on Cat @defer(label: "cat-defer-1") { + breed + } + ... on Cat @defer(label: "cat-defer-2") { + breed + } + ... on Dog @defer(label: "dog-defer") { + breed + } + } +} +''' + } + + def "nested defer"() { + String query = """ + query q { + animal { + ... on Cat @defer { + name + } + ... on Animal @defer { + name + ... on Dog @defer { + owner { + firstname + ... @defer { + lastname + } + ... @defer { + bestFriend { + firstname + ... @defer { + lastname + } + } + } + } + } + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + animal { + ... @defer { + name + } + ... @defer { + name + } + ... on Dog @defer { + owner { + firstname + ... @defer { + lastname + } + ... @defer { + bestFriend { + firstname + ... @defer { + lastname + } + } + } + } + } + } +} +''' + } + + def "multiple defers at the same level are preserved"() { + String query = """ + query q { + dog { + ... @defer { + name + } + ... @defer { + breed + } + ... @defer { + owner { + firstname + } + } + } + } + """ + GraphQLSchema schema = mkSchema(sdl) + def tree = createNormalizedTree(schema, query) + when: + def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables) + def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + printed == '''{ + dog { + ... @defer { + name + } + ... @defer { + breed + } + ... 
@defer { + owner { + firstname + } + } + } +} +''' + } + + private ExecutableNormalizedOperation createNormalizedTree(GraphQLSchema schema, String query, Map variables = [:]) { + assertValidQuery(schema, query, variables) + Document originalDocument = TestUtil.parseQuery(query) + + ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(true) + return dependencyGraph.createExecutableNormalizedOperationWithRawVariables( + schema, + originalDocument, + null, + RawVariables.of(variables), + options + ) + } + + private void assertValidQuery(GraphQLSchema graphQLSchema, String query, Map variables = [:]) { + GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build() + assert graphQL.execute(newExecutionInput().query(query).variables(variables)).errors.isEmpty() + } + + GraphQLSchema mkSchema(String sdl) { + def wiringFactory = new TestLiveMockedWiringFactory([JsonScalar.JSON_SCALAR]) + def runtimeWiring = RuntimeWiring.newRuntimeWiring() + .wiringFactory(wiringFactory).build() + TestUtil.schema(sdl, runtimeWiring) + } +} diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy index dd074ce7a8..a135ac3f9b 100644 --- a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy +++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy @@ -3,12 +3,13 @@ package graphql.normalized import graphql.GraphQL import graphql.TestUtil import graphql.execution.RawVariables +import graphql.execution.directives.QueryDirectives import graphql.language.AstPrinter -import graphql.language.Field -import graphql.language.OperationDefinition import graphql.language.AstSorter import graphql.language.Document +import graphql.language.Field import 
graphql.language.IntValue +import graphql.language.OperationDefinition import graphql.language.StringValue import graphql.parser.Parser import graphql.schema.GraphQLSchema @@ -22,8 +23,12 @@ import static graphql.language.OperationDefinition.Operation.MUTATION import static graphql.language.OperationDefinition.Operation.QUERY import static graphql.language.OperationDefinition.Operation.SUBSCRIPTION import static graphql.normalized.ExecutableNormalizedOperationToAstCompiler.compileToDocument +import static graphql.normalized.ExecutableNormalizedOperationToAstCompiler.compileToDocumentWithDeferSupport + +abstract class ExecutableNormalizedOperationToAstCompilerTest extends Specification { + static boolean deferSupport + -class ExecutableNormalizedOperationToAstCompilerTest extends Specification { VariablePredicate noVariables = new VariablePredicate() { @Override boolean shouldMakeVariable(ExecutableNormalizedField executableNormalizedField, String argName, NormalizedInputValue normalizedInputValue) { @@ -128,7 +133,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: printed == '''{ @@ -196,10 +201,9 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { """ def tree = createNormalizedTree(schema, query) - // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -250,7 +254,7 @@ class 
ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -331,7 +335,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -356,6 +360,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { } """ } + def "test interface fields with different output types on the implementations 4"() { // Tests we don't consider File as a possible option for parent on animals def schema = TestUtil.schema(""" @@ -422,7 +427,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -517,7 +522,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -584,7 +589,7 @@ 
class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -641,7 +646,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -699,7 +704,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -766,7 +771,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -864,7 +869,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, 
tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -962,7 +967,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { // printTreeWithLevelInfo(tree, schema).forEach { println it } when: - def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables) def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1029,7 +1034,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1063,7 +1068,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1089,7 +1094,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, "My_Op23", fields, noVariables) + def result = localCompileToDocument(schema, QUERY, "My_Op23", fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1134,7 +1139,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def 
fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1166,9 +1171,9 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { } ''' GraphQLSchema schema = mkSchema(sdl) - def fields = createNormalizedFields(schema, query,["v":123]) + def fields = createNormalizedFields(schema, query, ["v": 123]) when: - def result = compileToDocument(schema, QUERY, null, fields, allVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, allVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1200,7 +1205,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, noVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1231,7 +1236,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, SUBSCRIPTION, null, fields, noVariables) + def result = localCompileToDocument(schema, SUBSCRIPTION, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1242,7 +1247,6 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { } - def "test query directive"() { def sdl = ''' type Query { @@ -1275,14 +1279,14 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { ''' GraphQLSchema schema = 
mkSchema(sdl) Document document = new Parser().parse(query) - ExecutableNormalizedOperation eno = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema,document, null,RawVariables.emptyVariables()) + ExecutableNormalizedOperation eno = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) when: - def result = compileToDocument(schema, SUBSCRIPTION, null, eno.topLevelFields, eno.normalizedFieldToQueryDirectives, noVariables) + def result = localCompileToDocument(schema, SUBSCRIPTION, null, eno.topLevelFields, eno.normalizedFieldToQueryDirectives, noVariables) OperationDefinition operationDefinition = result.document.getDefinitionsOfType(OperationDefinition.class)[0] - def fooField = (Field)operationDefinition.selectionSet.children[0] - def nameField = (Field)fooField.selectionSet.children[0] + def fooField = (Field) operationDefinition.selectionSet.children[0] + def nameField = (Field) fooField.selectionSet.children[0] def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1327,7 +1331,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, noVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1372,7 +1376,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new 
AstSorter().sort(result.document)) then: documentPrinted == '''{ @@ -1383,17 +1387,10 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { ''' } - def "introspection query can be printed"() { + def "introspection query can be printed __schema"() { def sdl = ''' type Query { - foo1: Foo - } - interface Foo { - test: String - } - type AFoo implements Foo { - test: String - aFoo: String + f: String } ''' def query = ''' @@ -1405,20 +1402,13 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { } } } - - __type(name: "World") { - name - fields { - name - } - } - } + } ''' GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: documentPrinted == '''{ @@ -1429,6 +1419,34 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { } } } +} +''' + } + + def "introspection query can be printed __type"() { + def sdl = ''' + type Query { + f: String + } + ''' + def query = ''' + query introspection_query { + __type(name: "World") { + name + fields { + name + } + } + } + ''' + + GraphQLSchema schema = mkSchema(sdl) + def fields = createNormalizedFields(schema, query) + when: + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) + def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) + then: + documentPrinted == '''{ __type(name: "World") { fields { name @@ -1438,6 +1456,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { } ''' } + def "test is conditional when there is only one interface implementation"() { def sdl = ''' type Query { @@ -1468,7 +1487,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema 
schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: documentPrinted == '''{ @@ -1507,7 +1526,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: documentPrinted == '''{ @@ -1558,7 +1577,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: // Note: the typename field moves out of a fragment because AFoo is the only impl @@ -1609,7 +1628,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = mkSchema(sdl) def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: // Note: the typename field moves out of a fragment because AFoo is the only impl @@ -1659,7 +1678,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { GraphQLSchema schema = TestUtil.schema(sdl) def fields = 
createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, null, fields, noVariables) + def result = localCompileToDocument(schema, QUERY, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: // Note: the typename field moves out of a fragment because AFoo is the only impl @@ -1695,7 +1714,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query, vars) when: - def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1727,7 +1746,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query, vars) when: - def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1759,7 +1778,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query, vars) when: - def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1789,7 +1808,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, noVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new 
AstSorter().sort(result.document)) then: @@ -1819,7 +1838,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, noVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1849,7 +1868,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1879,7 +1898,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1916,7 +1935,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) def vars = result.variables @@ -1953,7 +1972,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, noVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, 
noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -1988,7 +2007,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, MUTATION, null, fields, noVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -2031,7 +2050,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query, variables) when: - def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables) + def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables) def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document)) then: @@ -2104,7 +2123,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { def fields = createNormalizedFields(schema, query) when: - def result = compileToDocument(schema, QUERY, "named", fields, allVariables) + def result = localCompileToDocument(schema, QUERY, "named", fields, allVariables) def document = result.document def vars = result.variables def ast = AstPrinter.printAst(new AstSorter().sort(document)) @@ -2140,8 +2159,8 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { assertValidQuery(schema, query, variables) Document originalDocument = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - return dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, originalDocument, null, RawVariables.of(variables)) + def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(deferSupport) + return 
ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, originalDocument, null, RawVariables.of(variables), options) } private List createNormalizedFields(GraphQLSchema schema, String query, Map variables = [:]) { @@ -2159,4 +2178,40 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification { .wiringFactory(wiringFactory).build() TestUtil.schema(sdl, runtimeWiring) } + + private static ExecutableNormalizedOperationToAstCompiler.CompilerResult localCompileToDocument( + GraphQLSchema schema, + OperationDefinition.Operation operationKind, + String operationName, + List topLevelFields, + VariablePredicate variablePredicate + ) { + return localCompileToDocument(schema, operationKind, operationName, topLevelFields,Map.of(), variablePredicate); + } + + private static ExecutableNormalizedOperationToAstCompiler.CompilerResult localCompileToDocument( + GraphQLSchema schema, + OperationDefinition.Operation operationKind, + String operationName, + List topLevelFields, + Map normalizedFieldToQueryDirectives, + VariablePredicate variablePredicate + ) { + if (deferSupport) { + return compileToDocumentWithDeferSupport(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate) + } + return compileToDocument(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate) + } +} + +class ExecutableNormalizedOperationToAstCompilerTestWithDeferSupport extends ExecutableNormalizedOperationToAstCompilerTest { + static { + deferSupport = true + } +} + +class ExecutableNormalizedOperationToAstCompilerTestNoDeferSupport extends ExecutableNormalizedOperationToAstCompilerTest { + static { + deferSupport = false + } } diff --git a/src/test/groovy/graphql/parser/ParserExceptionTest.groovy b/src/test/groovy/graphql/parser/ParserExceptionTest.groovy index 51ebe9f32b..459ba957b6 100644 --- a/src/test/groovy/graphql/parser/ParserExceptionTest.groovy 
+++ b/src/test/groovy/graphql/parser/ParserExceptionTest.groovy @@ -79,10 +79,12 @@ fragment X on SomeType { } ''' when: - new Parser().parseDocument(sdl, "namedSource") + Reader reader = MultiSourceReader.newMultiSourceReader() + .string(sdl, "namedSource") + .build() + new Parser().parseDocument(reader) then: def e = thrown(InvalidSyntaxException) - print e e.location.line == 2 e.location.column == 13 diff --git a/src/test/groovy/graphql/parser/ParserTest.groovy b/src/test/groovy/graphql/parser/ParserTest.groovy index 4484eb9e58..42e604e9a6 100644 --- a/src/test/groovy/graphql/parser/ParserTest.groovy +++ b/src/test/groovy/graphql/parser/ParserTest.groovy @@ -4,6 +4,7 @@ package graphql.parser import graphql.language.Argument import graphql.language.ArrayValue import graphql.language.AstComparator +import graphql.language.AstPrinter import graphql.language.BooleanValue import graphql.language.Description import graphql.language.Directive @@ -1151,5 +1152,40 @@ triple3 : """edge cases \\""" "" " \\"" \\" edge cases""" document.getDefinitions()[0].getSourceLocation() == SourceLocation.EMPTY } + def "escape characters correctly printed when printing AST"() { + given: + def env = newParserEnvironment() + .document(src) + .parserOptions( + ParserOptions.newParserOptions() + .captureIgnoredChars(true) + .build() + ) + .build() + + when: + // Parse the original Document + def doc = Parser.parse(env) + // Print the AST + def printed = AstPrinter.printAst(doc) + // Re-parse printed AST + def reparsed = Parser.parse(printed) + + then: + noExceptionThrown() // The printed AST was re-parsed without exception + + when: + def reparsedPrinted = AstPrinter.printAst(reparsed) + + then: + reparsedPrinted == printed // Re-parsing and re-printing produces the same result + + where: + src | _ + "\"\\\"\" scalar A" | _ + "\"\f\" scalar A" | _ + "\"\b\" scalar A" | _ + "\"\t\" scalar A" | _ + } } diff --git a/src/test/groovy/graphql/parser/SDLParserTest.groovy 
b/src/test/groovy/graphql/parser/SDLParserTest.groovy index e2973ccabd..928619cf23 100644 --- a/src/test/groovy/graphql/parser/SDLParserTest.groovy +++ b/src/test/groovy/graphql/parser/SDLParserTest.groovy @@ -800,13 +800,15 @@ input Gun { when: def defaultDoc = new Parser().parseDocument(input) - def namedDocNull = new Parser().parseDocument(input, (String) null) - def namedDoc = new Parser().parseDocument(input, sourceName) + Reader reader = MultiSourceReader.newMultiSourceReader() + .string(input, sourceName) + .build(); + + def namedDoc = new Parser().parseDocument(reader) then: defaultDoc.definitions[0].sourceLocation.sourceName == null - namedDocNull.definitions[0].sourceLocation.sourceName == null namedDoc.definitions[0].sourceLocation.sourceName == sourceName } diff --git a/src/test/groovy/graphql/schema/GraphQLArgumentTest.groovy b/src/test/groovy/graphql/schema/GraphQLArgumentTest.groovy index 23a506621d..a957b25425 100644 --- a/src/test/groovy/graphql/schema/GraphQLArgumentTest.groovy +++ b/src/test/groovy/graphql/schema/GraphQLArgumentTest.groovy @@ -1,6 +1,7 @@ package graphql.schema import graphql.collect.ImmutableKit +import static graphql.introspection.Introspection.DirectiveLocation.ARGUMENT_DEFINITION import graphql.language.FloatValue import graphql.schema.validation.InvalidSchemaException import spock.lang.Specification @@ -9,10 +10,10 @@ import static graphql.Scalars.GraphQLFloat import static graphql.Scalars.GraphQLInt import static graphql.Scalars.GraphQLString import static graphql.schema.GraphQLArgument.newArgument -import static graphql.schema.GraphQLDirective.newDirective import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition import static graphql.schema.GraphQLObjectType.newObject import static graphql.schema.GraphQLSchema.newSchema +import static graphql.TestUtil.mkDirective class GraphQLArgumentTest extends Specification { @@ -22,7 +23,7 @@ class GraphQLArgumentTest extends Specification { .description("A1_description") 
.type(GraphQLInt) .deprecate("custom reason") - .withDirective(newDirective().name("directive1")) + .withDirective(mkDirective("directive1", ARGUMENT_DEFINITION)) .build() when: def transformedArgument = startingArgument.transform({ @@ -30,7 +31,7 @@ class GraphQLArgumentTest extends Specification { .name("A2") .description("A2_description") .type(GraphQLString) - .withDirective(newDirective().name("directive3")) + .withDirective(mkDirective("directive3", ARGUMENT_DEFINITION)) .value("VALUE") // Retain deprecated for test coverage .deprecate(null) .defaultValue("DEFAULT") // Retain deprecated for test coverage @@ -79,9 +80,9 @@ class GraphQLArgumentTest extends Specification { def argument given: - def builder = GraphQLArgument.newArgument().name("A1") + def builder = newArgument().name("A1") .type(GraphQLInt) - .withDirective(newDirective().name("directive1")) + .withDirective(mkDirective("directive1", ARGUMENT_DEFINITION)) when: argument = builder.build() @@ -96,8 +97,8 @@ class GraphQLArgumentTest extends Specification { when: argument = builder .clearDirectives() - .withDirective(newDirective().name("directive2")) - .withDirective(newDirective().name("directive3")) + .withDirective(mkDirective("directive2", ARGUMENT_DEFINITION)) + .withDirective(mkDirective("directive3", ARGUMENT_DEFINITION)) .build() then: @@ -109,9 +110,9 @@ class GraphQLArgumentTest extends Specification { when: argument = builder .replaceDirectives([ - newDirective().name("directive1").build(), - newDirective().name("directive2").build(), - newDirective().name("directive3").build()]) // overwrite + mkDirective("directive1", ARGUMENT_DEFINITION), + mkDirective("directive2", ARGUMENT_DEFINITION), + mkDirective("directive3", ARGUMENT_DEFINITION)]) // overwrite .build() then: @@ -195,23 +196,53 @@ class GraphQLArgumentTest extends Specification { resolvedDefaultValue == null } - def "Applied schema directives arguments are validated for programmatic schemas"() { + def "schema directive 
arguments are validated for programmatic schemas"() { given: def arg = newArgument().name("arg").type(GraphQLInt).valueProgrammatic(ImmutableKit.emptyMap()).build() // Retain for test coverage - def directive = GraphQLDirective.newDirective().name("cached").argument(arg).build() + def directive = mkDirective("cached", ARGUMENT_DEFINITION, arg) def field = newFieldDefinition() - .name("hello") - .type(GraphQLString) - .argument(arg) - .withDirective(directive) - .build() + .name("hello") + .type(GraphQLString) + .argument(arg) + .withDirective(directive) + .build() when: - newSchema().query( + newSchema() + .query( newObject() - .name("Query") - .field(field) - .build()) + .name("Query") + .field(field) + .build() + ) + .additionalDirective(directive) + .build() + then: + def e = thrown(InvalidSchemaException) + e.message.contains("Invalid argument 'arg' for applied directive of name 'cached'") + } + + def "applied directive arguments are validated for programmatic schemas"() { + given: + def arg = newArgument() + .name("arg") + .type(GraphQLNonNull.nonNull(GraphQLInt)) .build() + def directive = mkDirective("cached", ARGUMENT_DEFINITION, arg) + def field = newFieldDefinition() + .name("hello") + .type(GraphQLString) + .withAppliedDirective(directive.toAppliedDirective()) + .build() + when: + newSchema() + .query( + newObject() + .name("Query") + .field(field) + .build() + ) + .additionalDirective(directive) + .build() then: def e = thrown(InvalidSchemaException) e.message.contains("Invalid argument 'arg' for applied directive of name 'cached'") diff --git a/src/test/groovy/graphql/schema/GraphQLDirectiveTest.groovy b/src/test/groovy/graphql/schema/GraphQLDirectiveTest.groovy index 5eba0fbc3a..4c9b8bd486 100644 --- a/src/test/groovy/graphql/schema/GraphQLDirectiveTest.groovy +++ b/src/test/groovy/graphql/schema/GraphQLDirectiveTest.groovy @@ -1,6 +1,8 @@ package graphql.schema +import graphql.AssertException import graphql.TestUtil +import 
graphql.introspection.Introspection import graphql.language.Node import spock.lang.Specification @@ -168,9 +170,38 @@ class GraphQLDirectiveTest extends Specification { then: assertDirectiveContainer(scalarType) + } + + def "throws an error on missing required properties"() { + given: + def validDirective = GraphQLDirective.newDirective() + .name("dir") + .validLocation(Introspection.DirectiveLocation.SCALAR) + .build() + + when: + validDirective.transform { it.name(null) } + + then: + def e = thrown(AssertException) + e.message.contains("Name must be non-null, non-empty") + when: + validDirective.transform { it.replaceArguments(null) } + + then: + def e2 = thrown(AssertException) + e2.message.contains("arguments must not be null") + + when: + validDirective.transform { it.clearValidLocations() } + + then: + def e3 = thrown(AssertException) + e3.message.contains("locations can't be empty") } + static boolean assertDirectiveContainer(GraphQLDirectiveContainer container) { assert container.hasDirective("d1") // Retain for test coverage assert container.hasAppliedDirective("d1") diff --git a/src/test/groovy/graphql/schema/GraphQLEnumValueDefinitionTest.groovy b/src/test/groovy/graphql/schema/GraphQLEnumValueDefinitionTest.groovy index 8f6a4145bc..0a755a9f3c 100644 --- a/src/test/groovy/graphql/schema/GraphQLEnumValueDefinitionTest.groovy +++ b/src/test/groovy/graphql/schema/GraphQLEnumValueDefinitionTest.groovy @@ -1,9 +1,10 @@ package graphql.schema +import static graphql.introspection.Introspection.DirectiveLocation import spock.lang.Specification -import static graphql.schema.GraphQLDirective.newDirective import static graphql.schema.GraphQLEnumValueDefinition.newEnumValueDefinition +import static graphql.TestUtil.mkDirective class GraphQLEnumValueDefinitionTest extends Specification { def "object can be transformed"() { @@ -11,15 +12,14 @@ class GraphQLEnumValueDefinitionTest extends Specification { def startEnumValue = newEnumValueDefinition().name("EV1") 
.description("EV1_description") .value("A") - .withDirective(newDirective().name("directive1")) + .withDirective(mkDirective("directive1", DirectiveLocation.ENUM_VALUE)) .build() when: def transformedEnumValue = startEnumValue.transform({ it .name("EV2") .value("X") - .withDirective(newDirective().name("directive2")) - + .withDirective(mkDirective("directive2", DirectiveLocation.ENUM_VALUE)) }) then: diff --git a/src/test/groovy/graphql/schema/GraphQLFieldDefinitionTest.groovy b/src/test/groovy/graphql/schema/GraphQLFieldDefinitionTest.groovy index d3c775cc9e..46a5c3c1e5 100644 --- a/src/test/groovy/graphql/schema/GraphQLFieldDefinitionTest.groovy +++ b/src/test/groovy/graphql/schema/GraphQLFieldDefinitionTest.groovy @@ -2,6 +2,7 @@ package graphql.schema import graphql.AssertException import graphql.TestUtil +import graphql.introspection.Introspection import graphql.schema.idl.SchemaPrinter import spock.lang.Specification @@ -10,9 +11,9 @@ import static graphql.Scalars.GraphQLFloat import static graphql.Scalars.GraphQLInt import static graphql.Scalars.GraphQLString import static graphql.TestUtil.mockArguments +import static graphql.TestUtil.mkDirective import static graphql.schema.DefaultGraphqlTypeComparatorRegistry.newComparators import static graphql.schema.GraphQLArgument.newArgument -import static graphql.schema.GraphQLDirective.newDirective import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition import static graphql.schema.idl.SchemaPrinter.Options.defaultOptions @@ -35,8 +36,8 @@ class GraphQLFieldDefinitionTest extends Specification { .deprecate("F1_deprecated") .argument(newArgument().name("argStr").type(GraphQLString)) .argument(newArgument().name("argInt").type(GraphQLInt)) - .withDirective(newDirective().name("directive1")) - .withDirective(newDirective().name("directive2")) + .withDirective(mkDirective("directive1", Introspection.DirectiveLocation.FIELD_DEFINITION)) + .withDirective(mkDirective("directive2", 
Introspection.DirectiveLocation.FIELD_DEFINITION)) .build() when: @@ -47,13 +48,10 @@ class GraphQLFieldDefinitionTest extends Specification { .argument(newArgument().name("argStr").type(GraphQLString)) .argument(newArgument().name("argInt").type(GraphQLBoolean)) .argument(newArgument().name("argIntAdded").type(GraphQLInt)) - .withDirective(newDirective().name("directive3")) - + .withDirective(mkDirective("directive3", Introspection.DirectiveLocation.FIELD_DEFINITION)) }) - then: - startingField.name == "F1" startingField.type == GraphQLFloat startingField.description == "F1_description" diff --git a/src/test/groovy/graphql/schema/GraphQLInputObjectFieldTest.groovy b/src/test/groovy/graphql/schema/GraphQLInputObjectFieldTest.groovy index 27d9fe8da9..67e3611442 100644 --- a/src/test/groovy/graphql/schema/GraphQLInputObjectFieldTest.groovy +++ b/src/test/groovy/graphql/schema/GraphQLInputObjectFieldTest.groovy @@ -1,12 +1,13 @@ package graphql.schema +import graphql.introspection.Introspection import graphql.language.FloatValue import spock.lang.Specification import static graphql.Scalars.GraphQLFloat import static graphql.Scalars.GraphQLInt -import static graphql.schema.GraphQLDirective.newDirective import static graphql.schema.GraphQLInputObjectField.newInputObjectField +import static graphql.TestUtil.mkDirective class GraphQLInputObjectFieldTest extends Specification { @@ -16,8 +17,8 @@ class GraphQLInputObjectFieldTest extends Specification { .name("F1") .type(GraphQLFloat) .description("F1_description") - .withDirective(newDirective().name("directive1")) - .withDirective(newDirective().name("directive2")) + .withDirective(mkDirective("directive1", Introspection.DirectiveLocation.INPUT_FIELD_DEFINITION)) + .withDirective(mkDirective("directive2", Introspection.DirectiveLocation.INPUT_FIELD_DEFINITION)) .deprecate("No longer useful") .build() @@ -26,13 +27,10 @@ class GraphQLInputObjectFieldTest extends Specification { builder.name("F2") .type(GraphQLInt) 
.deprecate(null) - .withDirective(newDirective().name("directive3")) - + .withDirective(mkDirective("directive3", Introspection.DirectiveLocation.INPUT_FIELD_DEFINITION)) }) - then: - startingField.name == "F1" startingField.type == GraphQLFloat startingField.description == "F1_description" diff --git a/src/test/groovy/graphql/schema/GraphQLScalarTypeTest.groovy b/src/test/groovy/graphql/schema/GraphQLScalarTypeTest.groovy index ca020cb0b5..100269838d 100644 --- a/src/test/groovy/graphql/schema/GraphQLScalarTypeTest.groovy +++ b/src/test/groovy/graphql/schema/GraphQLScalarTypeTest.groovy @@ -1,8 +1,9 @@ package graphql.schema +import graphql.introspection.Introspection import spock.lang.Specification -import static graphql.schema.GraphQLDirective.newDirective +import static graphql.TestUtil.mkDirective class GraphQLScalarTypeTest extends Specification { Coercing coercing = new Coercing() { @@ -28,14 +29,14 @@ class GraphQLScalarTypeTest extends Specification { .name("S1") .description("S1_description") .coercing(coercing) - .withDirective(newDirective().name("directive1")) - .withDirective(newDirective().name("directive2")) + .withDirective(mkDirective("directive1", Introspection.DirectiveLocation.SCALAR)) + .withDirective(mkDirective("directive2", Introspection.DirectiveLocation.SCALAR)) .build() when: def transformedScalar = startingScalar.transform({ builder -> builder.name("S2") .description("S2_description") - .withDirective(newDirective().name("directive3")) + .withDirective(mkDirective("directive3", Introspection.DirectiveLocation.SCALAR)) }) then: @@ -55,6 +56,5 @@ class GraphQLScalarTypeTest extends Specification { transformedScalar.getDirective("directive1") != null transformedScalar.getDirective("directive2") != null transformedScalar.getDirective("directive3") != null - } } diff --git a/src/test/groovy/graphql/schema/GraphQLSchemaTest.groovy b/src/test/groovy/graphql/schema/GraphQLSchemaTest.groovy index de2a51bf04..bf18e083c2 100644 --- 
a/src/test/groovy/graphql/schema/GraphQLSchemaTest.groovy +++ b/src/test/groovy/graphql/schema/GraphQLSchemaTest.groovy @@ -5,6 +5,8 @@ import graphql.Directives import graphql.ExecutionInput import graphql.GraphQL import graphql.TestUtil +import graphql.language.Directive +import graphql.language.SchemaExtensionDefinition import graphql.schema.idl.RuntimeWiring import graphql.schema.idl.TypeRuntimeWiring import graphql.util.TraversalControl @@ -129,6 +131,23 @@ class GraphQLSchemaTest extends Specification { )) } + def "schema builder copies extension definitions"() { + setup: + def schemaBuilder = basicSchemaBuilder() + def newDirective = Directive.newDirective().name("pizza").build() + def extension = SchemaExtensionDefinition.newSchemaExtensionDefinition().directive(newDirective).build() + def oldSchema = schemaBuilder.extensionDefinitions([extension]).build() + + when: + def newSchema = GraphQLSchema.newSchema(oldSchema).build() + + then: + oldSchema.extensionDefinitions.size() == 1 + newSchema.extensionDefinitions.size() == 1 + ((Directive) oldSchema.extensionDefinitions.first().getDirectives().first()).name == "pizza" + ((Directive) newSchema.extensionDefinitions.first().getDirectives().first()).name == "pizza" + } + def "clear directives works as expected"() { setup: def schemaBuilder = basicSchemaBuilder() @@ -233,10 +252,8 @@ class GraphQLSchemaTest extends Specification { } union UnionType = Cat | Dog - ''' - when: def schema = TestUtil.schema(sdl) diff --git a/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy b/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy index f771835575..b9fae5e3f0 100644 --- a/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy +++ b/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy @@ -637,6 +637,38 @@ class PropertyDataFetcherTest extends Specification { result == "bar" } + class BaseObject { + private String id + + String getId() { + return id + } + + void setId(String value) { + 
id = value; + } + } + + class OtherObject extends BaseObject {} + + def "Can access private property from base class that starts with i in Turkish"() { + // see https://github.com/graphql-java/graphql-java/issues/3385 + given: + Locale oldLocale = Locale.getDefault() + Locale.setDefault(new Locale("tr", "TR")) + + def environment = env(new OtherObject(id: "aValue")) + def fetcher = PropertyDataFetcher.fetching("id") + + when: + String propValue = fetcher.get(environment) + + then: + propValue == 'aValue' + + cleanup: + Locale.setDefault(oldLocale) + } /** * Classes from issue to ensure we reproduce as reported by customers * diff --git a/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy b/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy index 10a8d53a93..f7a6fc8a9c 100644 --- a/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy +++ b/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy @@ -6,25 +6,22 @@ import graphql.util.TraversalControl import graphql.util.TraverserContext import spock.lang.Specification +import static graphql.introspection.Introspection.DirectiveLocation import static graphql.schema.GraphQLArgument.newArgument import static graphql.schema.GraphQLTypeReference.typeRef import static graphql.schema.GraphqlTypeComparatorRegistry.BY_NAME_REGISTRY +import static graphql.TestUtil.mkDirective class SchemaTraverserTest extends Specification { - def "reachable scalar type"() { when: - def visitor = new GraphQLTestingVisitor() new SchemaTraverser().depthFirst(visitor, Scalars.GraphQLString) then: - visitor.getStack() == ["scalar: String", "fallback: String"] - - } def "reachable string argument type"() { @@ -48,7 +45,6 @@ class SchemaTraverserTest extends Specification { .build()) then: visitor.getStack() == ["argument: Test", "fallback: Test", "scalar: Int", "fallback: Int"] - } def "reachable enum type"() { @@ -65,7 +61,6 @@ class SchemaTraverserTest extends Specification { visitor.getStack() == ["enum: foo", "fallback: 
foo", "enum value: abc", "fallback: abc", "enum value: bar", "fallback: bar"] - } def "reachable field definition type"() { @@ -77,7 +72,6 @@ class SchemaTraverserTest extends Specification { .build()) then: visitor.getStack() == ["field: foo", "fallback: foo", "scalar: String", "fallback: String"] - } def "reachable input object field type"() { @@ -107,7 +101,6 @@ class SchemaTraverserTest extends Specification { "scalar: String", "fallback: String"] } - def "reachable interface type"() { when: def visitor = new GraphQLTestingVisitor() @@ -163,7 +156,6 @@ class SchemaTraverserTest extends Specification { "interface: bar", "fallback: bar"] } - def "reachable reference type"() { when: def visitor = new GraphQLTestingVisitor() @@ -210,8 +202,7 @@ class SchemaTraverserTest extends Specification { def scalarType = GraphQLScalarType.newScalar() .name("foo") .coercing(coercing) - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.SCALAR)) .withAppliedDirective(GraphQLAppliedDirective.newDirective() .name("barApplied")) .build() @@ -227,8 +218,7 @@ class SchemaTraverserTest extends Specification { def visitor = new GraphQLTestingVisitor() def objectType = GraphQLObjectType.newObject() .name("foo") - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.OBJECT)) .withAppliedDirective(GraphQLAppliedDirective.newDirective() .name("barApplied")) .build() @@ -245,8 +235,7 @@ class SchemaTraverserTest extends Specification { def fieldDefinition = GraphQLFieldDefinition.newFieldDefinition() .name("foo") .type(Scalars.GraphQLString) - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.FIELD_DEFINITION)) .withAppliedDirective(GraphQLAppliedDirective.newDirective() .name("barApplied")) .build() @@ -263,8 +252,7 @@ class SchemaTraverserTest extends Specification { def argument = 
newArgument() .name("foo") .type(Scalars.GraphQLString) - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.ARGUMENT_DEFINITION)) .withAppliedDirective(GraphQLAppliedDirective.newDirective() .name("barApplied")) .build() @@ -280,8 +268,7 @@ class SchemaTraverserTest extends Specification { def visitor = new GraphQLTestingVisitor() def interfaceType = GraphQLInterfaceType.newInterface() .name("foo") - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.INTERFACE)) .withAppliedDirective(GraphQLAppliedDirective.newDirective() .name("barApplied")) .build() @@ -298,8 +285,7 @@ class SchemaTraverserTest extends Specification { def unionType = GraphQLUnionType.newUnionType() .name("foo") .possibleType(GraphQLObjectType.newObject().name("dummy").build()) - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.UNION)) .build() new SchemaTraverser().depthFirst(visitor, unionType) then: @@ -312,8 +298,7 @@ class SchemaTraverserTest extends Specification { def enumType = GraphQLEnumType.newEnum() .name("foo") .value("dummy") - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.ENUM)) .build() new SchemaTraverser().depthFirst(visitor, enumType) then: @@ -325,8 +310,7 @@ class SchemaTraverserTest extends Specification { def visitor = new GraphQLTestingVisitor() def enumValue = GraphQLEnumValueDefinition.newEnumValueDefinition() .name("foo") - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.ENUM_VALUE)) .build() new SchemaTraverser().depthFirst(visitor, enumValue) then: @@ -338,8 +322,7 @@ class SchemaTraverserTest extends Specification { def visitor = new GraphQLTestingVisitor() def inputObjectType = GraphQLInputObjectType.newInputObject() .name("foo") - 
.withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.INPUT_OBJECT)) .build() new SchemaTraverser().depthFirst(visitor, inputObjectType) then: @@ -352,8 +335,7 @@ class SchemaTraverserTest extends Specification { def inputField = GraphQLInputObjectField.newInputObjectField() .name("foo") .type(Scalars.GraphQLString) - .withDirective(GraphQLDirective.newDirective() - .name("bar")) + .withDirective(mkDirective("bar", DirectiveLocation.INPUT_FIELD_DEFINITION)) .build() new SchemaTraverser().depthFirst(visitor, inputField) then: diff --git a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy index 623c2dfc04..2fc9336bcc 100644 --- a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy +++ b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy @@ -5,6 +5,7 @@ import graphql.schema.diffing.SchemaDiffing import spock.lang.Specification import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveAddition +import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentAddition import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentDeletion import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentRename import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentValueModification @@ -32,26 +33,540 @@ import static graphql.schema.diffing.ana.SchemaDifference.UnionModification class EditOperationAnalyzerAppliedDirectivesTest extends Specification { + def "applied directive argument added interface field"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on FIELD_DEFINITION + + type Query implements I{ + foo: String + } + interface I { + foo: String @d + } + ''' + def newSdl = ''' + 
directive @d(arg1:String) on FIELD_DEFINITION + + type Query implements I{ + foo: String + } + interface I { + foo: String @d(arg1: "foo") + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def argumentAddition = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentAddition) + def location = argumentAddition[0].locationDetail as AppliedDirectiveInterfaceFieldLocation + location.interfaceName == "I" + location.fieldName == "foo" + argumentAddition[0].argumentName == "arg1" + } + + def "applied directive argument value changed object"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on OBJECT + + type Query @d(arg1:"foo") { + foo: String + } + ''' + def newSdl = ''' + directive @d(arg1: String) on OBJECT + + type Query @d(arg1: "bar") { + foo: String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def detail = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveObjectLocation + location.name == "Query" + location.directiveName == "d" + detail[0].argumentName == "arg1" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed object field"() { + given: + def oldSdl = ''' + directive @d(arg:String) on FIELD_DEFINITION + + type Query { + foo: String @d(arg: "foo") + } + ''' + def newSdl = ''' + directive @d(arg: String) on FIELD_DEFINITION + + type Query { + foo: String @d(arg: "bar") + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def detail = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = 
detail[0].locationDetail as AppliedDirectiveObjectFieldLocation + location.objectName == "Query" + location.fieldName == "foo" + location.directiveName == "d" + detail[0].argumentName == "arg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed object field argument"() { + given: + def oldSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query { + foo(arg: String @d(directiveArg: "foo")) : String + } + ''' + def newSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query { + foo(arg: String @d(directiveArg: "bar")) : String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def detail = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveArgumentValueModification) + def locationDetail = detail[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation + locationDetail.objectName == "Query" + locationDetail.fieldName == "foo" + locationDetail.argumentName == "arg" + locationDetail.directiveName == "d" + detail[0].argumentName == "directiveArg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + + def "applied directive argument value changed interface"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on INTERFACE + + type Query implements I{ + foo: String + } + interface I @d(arg1: "foo") { + foo: String + } + ''' + def newSdl = ''' + directive @d(arg1:String) on INTERFACE + + type Query implements I{ + foo: String + } + interface I @d(arg1: "bar") { + foo: String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def detail = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as 
AppliedDirectiveInterfaceLocation + location.name == "I" + location.directiveName == "d" + detail[0].argumentName == "arg1" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + + def "applied directive argument value changed interface field"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on FIELD_DEFINITION + + type Query implements I{ + foo: String + } + interface I { + foo: String @d(arg1: "foo") + } + ''' + def newSdl = ''' + directive @d(arg1:String) on FIELD_DEFINITION + + type Query implements I{ + foo: String + } + interface I { + foo: String @d(arg1: "bar") + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def detail = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveInterfaceFieldLocation + location.interfaceName == "I" + location.fieldName == "foo" + location.directiveName == "d" + detail[0].argumentName == "arg1" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed interface field argument"() { + given: + def oldSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String @d(directiveArg: "foo") ): String + } + ''' + def newSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String @d(directiveArg: "bar") ): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def detail = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as 
AppliedDirectiveInterfaceFieldArgumentLocation + location.interfaceName == "I" + location.fieldName == "foo" + location.argumentName == "arg" + location.directiveName == "d" + detail[0].argumentName == "directiveArg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed input object"() { + given: + def oldSdl = ''' + directive @d(arg:String) on INPUT_OBJECT + input I @d(arg: "foo"){ + a: String + } + type Query { + foo(arg: I): String + } + ''' + def newSdl = ''' + directive @d(arg:String) on INPUT_OBJECT + input I @d(arg: "bar") { + a: String + } + type Query { + foo(arg: I): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.inputObjectDifferences["I"] instanceof InputObjectModification + def detail = (changes.inputObjectDifferences["I"] as InputObjectModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveInputObjectLocation + location.name == "I" + location.directiveName == "d" + detail[0].argumentName == "arg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + + } + + + def "applied directive argument value changed input object field "() { + given: + def oldSdl = ''' + directive @d(arg:String) on INPUT_FIELD_DEFINITION + input I { + a: String @d(arg: "foo") + } + type Query { + foo(arg: I): String + } + ''' + def newSdl = ''' + directive @d(arg:String) on INPUT_FIELD_DEFINITION + input I { + a: String @d(arg: "bar") + } + type Query { + foo(arg: I): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.inputObjectDifferences["I"] instanceof InputObjectModification + def detail = (changes.inputObjectDifferences["I"] as InputObjectModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveInputObjectFieldLocation + location.inputObjectName == "I" + location.fieldName == "a" + 
location.directiveName == "d" + detail[0].argumentName == "arg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed enum"() { + given: + def oldSdl = ''' + directive @d(arg:String) on ENUM + enum E @d(arg:"foo") { A, B } + type Query { + foo: E + } + ''' + def newSdl = ''' + directive @d(arg:String) on ENUM + enum E @d(arg: "bar") { A, B } + type Query { + foo: E + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.enumDifferences["E"] instanceof EnumModification + def detail = (changes.enumDifferences["E"] as EnumModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveEnumLocation + location.name == "E" + location.directiveName == "d" + detail[0].argumentName == "arg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed enum value"() { + given: + def oldSdl = ''' + directive @d(arg:String) on ENUM_VALUE + enum E { A, B @d(arg: "foo") } + type Query { + foo: E + } + ''' + def newSdl = ''' + directive @d(arg:String) on ENUM_VALUE + enum E { A, B @d(arg: "bar") } + type Query { + foo: E + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.enumDifferences["E"] instanceof EnumModification + def detail = (changes.enumDifferences["E"] as EnumModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveEnumValueLocation + location.enumName == "E" + location.valueName == "B" + location.directiveName == "d" + detail[0].argumentName == "arg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed union"() { + given: + def oldSdl = ''' + directive @d(arg: String) on UNION + type Query { + foo: U + } + union U @d(arg: "foo") = A | B + type A { a: String } + type B { b: String } + ''' + def newSdl = ''' 
+ directive @d(arg: String) on UNION + type Query { + foo: U + } + union U @d(arg: "bar") = A | B + type A { a: String } + type B { b: String } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.unionDifferences["U"] instanceof UnionModification + def detail = (changes.unionDifferences["U"] as UnionModification).getDetails(AppliedDirectiveArgumentValueModification) + (detail[0].locationDetail as AppliedDirectiveUnionLocation).name == "U" + detail[0].argumentName == "arg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed scalar"() { + given: + def oldSdl = ''' + directive @d(arg:String) on SCALAR + scalar DateTime @d(arg: "foo") + type Query { + foo: DateTime + } + ''' + def newSdl = ''' + directive @d(arg:String) on SCALAR + scalar DateTime @d(arg: "bar") + type Query { + foo: DateTime + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.scalarDifferences["DateTime"] instanceof ScalarModification + def detail = (changes.scalarDifferences["DateTime"] as ScalarModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveScalarLocation + location.name == "DateTime" + location.directiveName == "d" + detail[0].argumentName == "arg" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + def "applied directive argument value changed directive argument"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on ARGUMENT_DEFINITION + directive @d2(arg:String @d(arg1:"foo")) on ARGUMENT_DEFINITION + type Query { + foo: String + } + ''' + def newSdl = ''' + directive @d(arg1:String) on ARGUMENT_DEFINITION + directive @d2(arg:String @d(arg1:"bar")) on ARGUMENT_DEFINITION + type Query { + foo: String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.directiveDifferences["d2"] instanceof DirectiveModification + (changes.directiveDifferences["d2"] as 
DirectiveModification).details.size() == 1 + def detail = (changes.directiveDifferences["d2"] as DirectiveModification).getDetails(AppliedDirectiveArgumentValueModification) + def location = detail[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation + location.directiveDefinitionName == "d2" + location.directiveName == "d" + location.argumentName == "arg" + detail[0].argumentName == "arg1" + detail[0].oldValue == '"foo"' + detail[0].newValue == '"bar"' + } + + + def "applied directive argument added interface"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on INTERFACE + + type Query implements I{ + foo: String + } + interface I @d { + foo: String + } + ''' + def newSdl = ''' + directive @d(arg1:String) on INTERFACE + + type Query implements I{ + foo: String + } + interface I @d(arg1: "foo") { + foo: String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def argumentAddition = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentAddition) + def location = argumentAddition[0].locationDetail as AppliedDirectiveInterfaceLocation + location.name == "I" + argumentAddition[0].argumentName == "arg1" + } + def "applied directive argument deleted interface field "() { given: def oldSdl = ''' - directive @d(arg1:String) on FIELD_DEFINITION + directive @d(arg1:String) on FIELD_DEFINITION + + type Query implements I{ + foo: String + } + interface I { + foo: String @d(arg1: "foo") + } + ''' + def newSdl = ''' + directive @d(arg1:String) on FIELD_DEFINITION + + type Query implements I{ + foo: String + } + interface I { + foo: String @d + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def argumentDeletions = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentDeletion) + def location = 
argumentDeletions[0].locationDetail as AppliedDirectiveInterfaceFieldLocation + location.interfaceName == "I" + location.fieldName == "foo" + argumentDeletions[0].argumentName == "arg1" + } + + def "applied directive argument deleted interface"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on INTERFACE type Query implements I{ foo: String } - interface I { - foo: String @d(arg1: "foo") + interface I @d(arg1: "foo") { + foo: String } ''' def newSdl = ''' - directive @d(arg1:String) on FIELD_DEFINITION + directive @d(arg1:String) on INTERFACE - type Query implements I{ + type Query implements I { foo: String } - interface I { - foo: String @d + interface I @d{ + foo: String } ''' when: @@ -59,12 +574,12 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { then: changes.interfaceDifferences["I"] instanceof InterfaceModification def argumentDeletions = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentDeletion) - def location = argumentDeletions[0].locationDetail as AppliedDirectiveInterfaceFieldLocation - location.interfaceName == "I" - location.fieldName == "foo" + def location = argumentDeletions[0].locationDetail as AppliedDirectiveInterfaceLocation + location.name == "I" argumentDeletions[0].argumentName == "arg1" } + def "applied directive added input object field "() { given: def oldSdl = ''' @@ -177,6 +692,59 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { argumentRenames[0].newName == "arg2" } + def "applied directive argument added object"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on OBJECT + + type Query @d { + foo: String + } + ''' + def newSdl = ''' + directive @d(arg1: String) on OBJECT + + type Query @d(arg1: "foo") { + foo: String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def argumentAddition = (changes.objectDifferences["Query"] as 
ObjectModification).getDetails(AppliedDirectiveArgumentAddition) + def location = argumentAddition[0].locationDetail as AppliedDirectiveObjectLocation + location.name == "Query" + argumentAddition[0].argumentName == "arg1" + } + + def "applied directive argument added object field"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on FIELD_DEFINITION + + type Query { + foo: String @d + } + ''' + def newSdl = ''' + directive @d(arg1: String) on FIELD_DEFINITION + + type Query { + foo: String @d(arg1: "foo") + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def argumentAddition = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveArgumentAddition) + def location = argumentAddition[0].locationDetail as AppliedDirectiveObjectFieldLocation + location.objectName == "Query" + location.fieldName == "foo" + argumentAddition[0].argumentName == "arg1" + } + def "applied directive argument deleted object field"() { given: def oldSdl = ''' @@ -204,6 +772,32 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { argumentDeletions[0].argumentName == "arg1" } + def "applied directive argument deleted object"() { + given: + def oldSdl = ''' + directive @d(arg1:String) on OBJECT + + type Query @d(arg1: "foo"){ + foo: String + } + ''' + def newSdl = ''' + directive @d(arg1: String) on OBJECT + + type Query @d { + foo: String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def argumentDeletions = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveArgumentDeletion) + def location = argumentDeletions[0].locationDetail as AppliedDirectiveObjectLocation + location.name == "Query" + argumentDeletions[0].argumentName == "arg1" + } + def "applied directive added input object"() { given: def oldSdl = ''' @@ -253,6 +847,7 
@@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { def changes = calcDiff(oldSdl, newSdl) then: changes.objectDifferences["Query"] instanceof ObjectModification + (changes.objectDifferences["Query"] as ObjectModification).details.size() == 1 def appliedDirective = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveAddition) (appliedDirective[0].locationDetail as AppliedDirectiveObjectLocation).name == "Query" appliedDirective[0].name == "d" @@ -394,6 +989,63 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { appliedDirective[0].name == "d" } + def "applied directive argument added enum value"() { + given: + def oldSdl = ''' + directive @d(arg:String) on ENUM_VALUE + enum E { A, B @d } + type Query { + foo: E + } + ''' + def newSdl = ''' + directive @d(arg:String) on ENUM_VALUE + enum E { A, B @d(arg: "foo") } + type Query { + foo: E + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.enumDifferences["E"] instanceof EnumModification + def argumentAdded = (changes.enumDifferences["E"] as EnumModification).getDetails(AppliedDirectiveArgumentAddition) + def location = argumentAdded[0].locationDetail as AppliedDirectiveEnumValueLocation + location.enumName == "E" + location.valueName == "B" + location.directiveName == "d" + argumentAdded[0].argumentName == "arg" + } + + def "applied directive argument deleted enum value"() { + given: + def oldSdl = ''' + directive @d(arg:String) on ENUM_VALUE + enum E { A, B @d(arg: "foo") } + type Query { + foo: E + } + ''' + def newSdl = ''' + directive @d(arg:String) on ENUM_VALUE + enum E { A, B @d } + type Query { + foo: E + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.enumDifferences["E"] instanceof EnumModification + def argumentDeletion = (changes.enumDifferences["E"] as EnumModification).getDetails(AppliedDirectiveArgumentDeletion) + def location = argumentDeletion[0].locationDetail as 
AppliedDirectiveEnumValueLocation + location.enumName == "E" + location.valueName == "B" + location.directiveName == "d" + argumentDeletion[0].argumentName == "arg" + } + + def "applied directive added object field argument"() { given: def oldSdl = ''' @@ -405,63 +1057,235 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { def newSdl = ''' directive @d(arg:String) on ARGUMENT_DEFINITION type Query { - foo(arg: String @d(arg: "foo")) : String + foo(arg: String @d(arg: "foo")) : String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def appliedDirective = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveAddition) + (appliedDirective[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation).objectName == "Query" + (appliedDirective[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation).fieldName == "foo" + (appliedDirective[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation).argumentName == "arg" + appliedDirective[0].name == "d" + } + + def "applied directive argument added object field argument"() { + given: + def oldSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query { + foo(arg: String @d) : String + } + ''' + def newSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query { + foo(arg: String @d(directiveArg: "foo")) : String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def appliedDirectiveArgumentAddition = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveArgumentAddition) + def locationDetail = appliedDirectiveArgumentAddition[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation + locationDetail.objectName == "Query" + locationDetail.fieldName == "foo" + locationDetail.argumentName == "arg" 
+ appliedDirectiveArgumentAddition[0].argumentName == "directiveArg" + } + + def "applied directive argument deleted object field argument"() { + given: + def oldSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query { + foo(arg: String @d(directiveArg: "foo")) : String + } + ''' + def newSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query { + foo(arg: String @d) : String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def appliedDirectiveArgumentDeletion = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveArgumentDeletion) + def locationDetail = appliedDirectiveArgumentDeletion[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation + locationDetail.objectName == "Query" + locationDetail.fieldName == "foo" + locationDetail.argumentName == "arg" + appliedDirectiveArgumentDeletion[0].argumentName == "directiveArg" + } + + def "applied directive added interface field argument"() { + given: + def oldSdl = ''' + directive @d(arg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String): String + } + ''' + def newSdl = ''' + directive @d(arg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String @d): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def appliedDirective = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveAddition) + (appliedDirective[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation).interfaceName == "I" + (appliedDirective[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation).fieldName == "foo" + (appliedDirective[0].locationDetail as 
AppliedDirectiveInterfaceFieldArgumentLocation).argumentName == "arg" + appliedDirective[0].name == "d" + } + + def "applied directive argument added interface field argument"() { + given: + def oldSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String @d): String + } + ''' + def newSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String @d(directiveArg: "foo") ): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def appliedDirective = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentAddition) + def location = appliedDirective[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation + location.interfaceName == "I" + location.fieldName == "foo" + location.argumentName == "arg" + appliedDirective[0].argumentName == "directiveArg" + } + + def "applied directive argument deleted interface field argument"() { + given: + def oldSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String @d(directiveArg: "foo")): String + } + ''' + def newSdl = ''' + directive @d(directiveArg:String) on ARGUMENT_DEFINITION + type Query implements I { + foo(arg: String) : String + } + interface I { + foo(arg: String @d): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.interfaceDifferences["I"] instanceof InterfaceModification + def appliedDirective = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveArgumentDeletion) + def location = appliedDirective[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation + location.interfaceName == "I" + 
location.fieldName == "foo" + location.argumentName == "arg" + appliedDirective[0].argumentName == "directiveArg" + } + + def "applied directive added directive argument "() { + given: + def oldSdl = ''' + directive @d(arg:String) on ARGUMENT_DEFINITION + directive @d2(arg:String) on ARGUMENT_DEFINITION + type Query { + foo: String + } + ''' + def newSdl = ''' + directive @d(arg:String) on ARGUMENT_DEFINITION + directive @d2(arg:String @d) on ARGUMENT_DEFINITION + type Query { + foo: String } ''' when: def changes = calcDiff(oldSdl, newSdl) then: - changes.objectDifferences["Query"] instanceof ObjectModification - def appliedDirective = (changes.objectDifferences["Query"] as ObjectModification).getDetails(AppliedDirectiveAddition) - (appliedDirective[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation).objectName == "Query" - (appliedDirective[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation).fieldName == "foo" - (appliedDirective[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation).argumentName == "arg" + changes.directiveDifferences["d2"] instanceof DirectiveModification + def appliedDirective = (changes.directiveDifferences["d2"] as DirectiveModification).getDetails(AppliedDirectiveAddition) + def location = appliedDirective[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation + location.directiveDefinitionName == "d2" + location.argumentName == "arg" + location.directiveName == "d" appliedDirective[0].name == "d" } - def "applied directive added interface field argument"() { + def "applied directive argument added directive argument "() { given: def oldSdl = ''' directive @d(arg:String) on ARGUMENT_DEFINITION - type Query implements I { - foo(arg: String) : String - } - interface I { - foo(arg: String): String + directive @d2(arg2:String @d) on ARGUMENT_DEFINITION + type Query { + foo: String } ''' def newSdl = ''' directive @d(arg:String) on ARGUMENT_DEFINITION - type Query implements I { - foo(arg: String) : 
String - } - interface I { - foo(arg: String @d): String + directive @d2(arg2:String @d(arg:"foo") ) on ARGUMENT_DEFINITION + type Query { + foo: String } ''' when: def changes = calcDiff(oldSdl, newSdl) then: - changes.interfaceDifferences["I"] instanceof InterfaceModification - def appliedDirective = (changes.interfaceDifferences["I"] as InterfaceModification).getDetails(AppliedDirectiveAddition) - (appliedDirective[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation).interfaceName == "I" - (appliedDirective[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation).fieldName == "foo" - (appliedDirective[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation).argumentName == "arg" - appliedDirective[0].name == "d" + changes.directiveDifferences["d2"] instanceof DirectiveModification + (changes.directiveDifferences["d2"] as DirectiveModification).details.size() == 1 + def appliedDirectiveArgumentAddition = (changes.directiveDifferences["d2"] as DirectiveModification).getDetails(AppliedDirectiveArgumentAddition) + def location = appliedDirectiveArgumentAddition[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation + location.directiveName == "d" + location.argumentName == "arg2" + appliedDirectiveArgumentAddition[0].argumentName == "arg" } - def "applied directive added directive argument "() { + def "applied directive argument deleted directive argument "() { given: def oldSdl = ''' directive @d(arg:String) on ARGUMENT_DEFINITION - directive @d2(arg:String) on ARGUMENT_DEFINITION + directive @d2(arg2:String @d(arg:"foo")) on ARGUMENT_DEFINITION type Query { foo: String } ''' def newSdl = ''' directive @d(arg:String) on ARGUMENT_DEFINITION - directive @d2(arg:String @d) on ARGUMENT_DEFINITION + directive @d2(arg2:String @d ) on ARGUMENT_DEFINITION type Query { foo: String } @@ -470,12 +1294,15 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { def changes = calcDiff(oldSdl, newSdl) then: 
changes.directiveDifferences["d2"] instanceof DirectiveModification - def appliedDirective = (changes.directiveDifferences["d2"] as DirectiveModification).getDetails(AppliedDirectiveAddition) - (appliedDirective[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation).directiveName == "d2" - (appliedDirective[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation).argumentName == "arg" - appliedDirective[0].name == "d" + (changes.directiveDifferences["d2"] as DirectiveModification).details.size() == 1 + def appliedDirectiveArgumentDeletion = (changes.directiveDifferences["d2"] as DirectiveModification).getDetails(AppliedDirectiveArgumentDeletion) + def location = appliedDirectiveArgumentDeletion[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation + location.directiveName == "d" + location.argumentName == "arg2" + appliedDirectiveArgumentDeletion[0].argumentName == "arg" } + def "applied directive deleted object"() { given: def oldSdl = ''' @@ -501,17 +1328,17 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { appliedDirective[0].name == "d" } - def "applied directive deleted directive argument "() { + def "applied directive deleted argument directive argument"() { given: def oldSdl = ''' - directive @d(arg:String) on ARGUMENT_DEFINITION - directive @d2(arg:String @d) on ARGUMENT_DEFINITION + directive @d(arg1:String) on ARGUMENT_DEFINITION + directive @d2(arg:String @d(arg1:"foo")) on ARGUMENT_DEFINITION type Query { foo: String } ''' def newSdl = ''' - directive @d(arg:String) on ARGUMENT_DEFINITION + directive @d(arg1:String) on ARGUMENT_DEFINITION directive @d2(arg:String) on ARGUMENT_DEFINITION type Query { foo: String @@ -521,9 +1348,13 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { def changes = calcDiff(oldSdl, newSdl) then: changes.directiveDifferences["d2"] instanceof DirectiveModification + // whole applied directive is deleted, so we don't count the applied argument deletion 
+ (changes.directiveDifferences["d2"] as DirectiveModification).details.size() == 1 def appliedDirective = (changes.directiveDifferences["d2"] as DirectiveModification).getDetails(AppliedDirectiveDeletion) - (appliedDirective[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation).directiveName == "d2" - (appliedDirective[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation).argumentName == "arg" + def location = appliedDirective[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation + location.directiveDefinitionName == "d2" + location.argumentName == "arg" + location.directiveName == "d" appliedDirective[0].name == "d" } @@ -556,6 +1387,58 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { appliedDirective[0].name == "d" } + def "applied directive deleted argument enum"() { + given: + def oldSdl = ''' + directive @d(arg:String) on ENUM + enum E @d(arg: "foo") { A, B } + type Query { + foo: E + } + ''' + def newSdl = ''' + directive @d(arg:String) on ENUM + enum E @d { A, B } + type Query { + foo: E + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.enumDifferences["E"] instanceof EnumModification + def argumentDeleted = (changes.enumDifferences["E"] as EnumModification).getDetails(AppliedDirectiveArgumentDeletion) + (argumentDeleted[0].locationDetail as AppliedDirectiveEnumLocation).name == "E" + argumentDeleted[0].argumentName == "arg" + } + + def "applied directive added argument enum"() { + given: + def oldSdl = ''' + directive @d(arg:String) on ENUM + enum E @d { A, B } + type Query { + foo: E + } + ''' + def newSdl = ''' + directive @d(arg:String) on ENUM + enum E @d(arg: "foo"){ A, B } + type Query { + foo: E + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.enumDifferences["E"] instanceof EnumModification + def argumentAdded = (changes.enumDifferences["E"] as EnumModification).getDetails(AppliedDirectiveArgumentAddition) + 
(argumentAdded[0].locationDetail as AppliedDirectiveEnumLocation).name == "E" + argumentAdded[0].argumentName == "arg" + + } + + def "applied directive deleted enum value"() { given: def oldSdl = ''' @@ -642,6 +1525,72 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { appliedDirective[0].name == "d" } + + def "applied directive argument added input object field "() { + given: + def oldSdl = ''' + directive @d(arg:String) on INPUT_FIELD_DEFINITION + input I { + a: String @d + } + type Query { + foo(arg: I): String + } + ''' + def newSdl = ''' + directive @d(arg:String) on INPUT_FIELD_DEFINITION + input I { + a: String @d(arg: "foo") + } + type Query { + foo(arg: I): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.inputObjectDifferences["I"] instanceof InputObjectModification + def argumentAdded = (changes.inputObjectDifferences["I"] as InputObjectModification).getDetails(AppliedDirectiveArgumentAddition) + def location = argumentAdded[0].locationDetail as AppliedDirectiveInputObjectFieldLocation + location.inputObjectName == "I" + location.fieldName == "a" + location.directiveName == "d" + argumentAdded[0].argumentName == "arg" + } + + def "applied directive argument deleted input object field "() { + given: + def oldSdl = ''' + directive @d(arg:String) on INPUT_FIELD_DEFINITION + input I { + a: String @d(arg: "foo") + } + type Query { + foo(arg: I): String + } + ''' + def newSdl = ''' + directive @d(arg:String) on INPUT_FIELD_DEFINITION + input I { + a: String @d + } + type Query { + foo(arg: I): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.inputObjectDifferences["I"] instanceof InputObjectModification + def argumentDeletion = (changes.inputObjectDifferences["I"] as InputObjectModification).getDetails(AppliedDirectiveArgumentDeletion) + def location = argumentDeletion[0].locationDetail as AppliedDirectiveInputObjectFieldLocation + location.inputObjectName == "I" + 
location.fieldName == "a" + location.directiveName == "d" + argumentDeletion[0].argumentName == "arg" + } + + def "applied directive deleted interface"() { given: def oldSdl = ''' @@ -815,6 +1764,175 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification { appliedDirective[0].name == "d" } + def "applied directive argument added union"() { + given: + def oldSdl = ''' + directive @d(arg:String) on UNION + type Query { + foo: FooBar + } + union FooBar @d = A | B + type A { a: String } + type B { b: String } + ''' + def newSdl = ''' + directive @d(arg:String) on UNION + type Query { + foo: FooBar + } + union FooBar @d(arg:"arg") = A | B + type A { a: String } + type B { b: String } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.unionDifferences["FooBar"] instanceof UnionModification + def argumentAdded = (changes.unionDifferences["FooBar"] as UnionModification).getDetails(AppliedDirectiveArgumentAddition) + (argumentAdded[0].locationDetail as AppliedDirectiveUnionLocation).name == "FooBar" + (argumentAdded[0].locationDetail as AppliedDirectiveUnionLocation).directiveName == "d" + argumentAdded[0].argumentName == "arg" + } + + def "applied directive argument deleted union"() { + given: + def oldSdl = ''' + directive @d(arg:String) on UNION + type Query { + foo: FooBar + } + union FooBar @d(arg:"arg") = A | B + type A { a: String } + type B { b: String } + ''' + def newSdl = ''' + directive @d(arg:String) on UNION + type Query { + foo: FooBar + } + union FooBar @d = A | B + type A { a: String } + type B { b: String } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.unionDifferences["FooBar"] instanceof UnionModification + def argumentDeleted = (changes.unionDifferences["FooBar"] as UnionModification).getDetails(AppliedDirectiveArgumentDeletion) + (argumentDeleted[0].locationDetail as AppliedDirectiveUnionLocation).name == "FooBar" + (argumentDeleted[0].locationDetail as 
AppliedDirectiveUnionLocation).directiveName == "d" + argumentDeleted[0].argumentName == "arg" + } + + + def "applied directive argument added scalar"() { + given: + def oldSdl = ''' + directive @d(arg:String) on SCALAR + scalar DateTime @d + type Query { + foo: DateTime + } + ''' + def newSdl = ''' + directive @d(arg:String) on SCALAR + scalar DateTime @d(arg: "foo") + type Query { + foo: DateTime + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.scalarDifferences["DateTime"] instanceof ScalarModification + def argumentAdded = (changes.scalarDifferences["DateTime"] as ScalarModification).getDetails(AppliedDirectiveArgumentAddition) + (argumentAdded[0].locationDetail as AppliedDirectiveScalarLocation).name == "DateTime" + argumentAdded[0].argumentName == "arg" + } + + def "applied directive argument deleted scalar"() { + given: + def oldSdl = ''' + directive @d(arg:String) on SCALAR + scalar DateTime @d(arg: "foo") + type Query { + foo: DateTime + } + ''' + def newSdl = ''' + directive @d(arg:String) on SCALAR + scalar DateTime @d + type Query { + foo: DateTime + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.scalarDifferences["DateTime"] instanceof ScalarModification + def argumentDeletion = (changes.scalarDifferences["DateTime"] as ScalarModification).getDetails(AppliedDirectiveArgumentDeletion) + (argumentDeletion[0].locationDetail as AppliedDirectiveScalarLocation).name == "DateTime" + argumentDeletion[0].argumentName == "arg" + } + + def "applied directive argument added input object"() { + given: + def oldSdl = ''' + directive @d(arg:String) on INPUT_OBJECT + input I @d { + a: String + } + type Query { + foo(arg: I): String + } + ''' + def newSdl = ''' + directive @d(arg:String) on INPUT_OBJECT + input I @d(arg: "foo") { + a: String + } + type Query { + foo(arg: I): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.inputObjectDifferences["I"] instanceof 
InputObjectModification + def argumentAdded = (changes.inputObjectDifferences["I"] as InputObjectModification).getDetails(AppliedDirectiveArgumentAddition) + (argumentAdded[0].locationDetail as AppliedDirectiveInputObjectLocation).name == "I" + argumentAdded[0].argumentName == "arg" + } + + def "applied directive argument deleted input object"() { + given: + def oldSdl = ''' + directive @d(arg:String) on INPUT_OBJECT + input I @d(arg: "foo") { + a: String + } + type Query { + foo(arg: I): String + } + ''' + def newSdl = ''' + directive @d(arg:String) on INPUT_OBJECT + input I @d { + a: String + } + type Query { + foo(arg: I): String + } + ''' + when: + def changes = calcDiff(oldSdl, newSdl) + then: + changes.inputObjectDifferences["I"] instanceof InputObjectModification + def argumentAdded = (changes.inputObjectDifferences["I"] as InputObjectModification).getDetails(AppliedDirectiveArgumentDeletion) + (argumentAdded[0].locationDetail as AppliedDirectiveInputObjectLocation).name == "I" + argumentAdded[0].argumentName == "arg" + } + EditOperationAnalysisResult calcDiff( String oldSdl, diff --git a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy index 2ab7bbd7b2..8661293408 100644 --- a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy +++ b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy @@ -1859,8 +1859,9 @@ class EditOperationAnalyzerTest extends Specification { directiveDeletion[0].locationDetail instanceof AppliedDirectiveDirectiveArgumentLocation def location = directiveDeletion[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation - location.directiveName == "d" + location.directiveDefinitionName == "d" location.argumentName == "message" + location.directiveName == "a" } def "field output type changed and applied directive removed"() { diff --git 
a/src/test/groovy/graphql/schema/idl/RuntimeWiringTest.groovy b/src/test/groovy/graphql/schema/idl/RuntimeWiringTest.groovy index a12b854fe7..bd3c225d2a 100644 --- a/src/test/groovy/graphql/schema/idl/RuntimeWiringTest.groovy +++ b/src/test/groovy/graphql/schema/idl/RuntimeWiringTest.groovy @@ -1,5 +1,6 @@ package graphql.schema.idl +import graphql.Scalars import graphql.TypeResolutionEnvironment import graphql.schema.Coercing import graphql.schema.DataFetcher @@ -9,6 +10,7 @@ import graphql.schema.GraphQLFieldsContainer import graphql.schema.GraphQLObjectType import graphql.schema.GraphQLScalarType import graphql.schema.TypeResolver +import graphql.schema.idl.errors.StrictModeWiringException import graphql.schema.visibility.GraphqlFieldVisibility import spock.lang.Specification @@ -62,22 +64,22 @@ class RuntimeWiringTest extends Specification { def "basic call structure"() { def wiring = RuntimeWiring.newRuntimeWiring() .type("Query", { type -> - type - .dataFetcher("fieldX", new NamedDF("fieldX")) - .dataFetcher("fieldY", new NamedDF("fieldY")) - .dataFetcher("fieldZ", new NamedDF("fieldZ")) - .defaultDataFetcher(new NamedDF("defaultQueryDF")) - .typeResolver(new NamedTR("typeResolver4Query")) - } as UnaryOperator) + type + .dataFetcher("fieldX", new NamedDF("fieldX")) + .dataFetcher("fieldY", new NamedDF("fieldY")) + .dataFetcher("fieldZ", new NamedDF("fieldZ")) + .defaultDataFetcher(new NamedDF("defaultQueryDF")) + .typeResolver(new NamedTR("typeResolver4Query")) + } as UnaryOperator) .type("Mutation", { type -> - type - .dataFetcher("fieldX", new NamedDF("mfieldX")) - .dataFetcher("fieldY", new NamedDF("mfieldY")) - .dataFetcher("fieldZ", new NamedDF("mfieldZ")) - .defaultDataFetcher(new NamedDF("defaultMutationDF")) - .typeResolver(new NamedTR("typeResolver4Mutation")) - } as UnaryOperator) + type + .dataFetcher("fieldX", new NamedDF("mfieldX")) + .dataFetcher("fieldY", new NamedDF("mfieldY")) + .dataFetcher("fieldZ", new NamedDF("mfieldZ")) + 
.defaultDataFetcher(new NamedDF("defaultMutationDF")) + .typeResolver(new NamedTR("typeResolver4Mutation")) + } as UnaryOperator) .build() @@ -190,4 +192,49 @@ class RuntimeWiringTest extends Specification { newWiring.scalars["Custom2"] == customScalar2 newWiring.fieldVisibility == fieldVisibility } + + def "strict mode can stop certain redefinitions"() { + DataFetcher DF1 = env -> "x" + TypeResolver TR1 = env -> null + EnumValuesProvider EVP1 = name -> null + + when: + RuntimeWiring.newRuntimeWiring() + .strictMode() + .type(TypeRuntimeWiring.newTypeWiring("Foo").dataFetcher("foo", DF1)) + .type(TypeRuntimeWiring.newTypeWiring("Foo").dataFetcher("bar", DF1)) + + + then: + def e1 = thrown(StrictModeWiringException) + e1.message == "The type Foo has already been defined" + + when: + RuntimeWiring.newRuntimeWiring() + .strictMode() + .type(TypeRuntimeWiring.newTypeWiring("Foo").typeResolver(TR1)) + .type(TypeRuntimeWiring.newTypeWiring("Foo").typeResolver(TR1)) + + then: + def e2 = thrown(StrictModeWiringException) + e2.message == "The type Foo already has a type resolver defined" + + when: + RuntimeWiring.newRuntimeWiring() + .strictMode() + .type(TypeRuntimeWiring.newTypeWiring("Foo").enumValues(EVP1)) + .type(TypeRuntimeWiring.newTypeWiring("Foo").enumValues(EVP1)) + then: + def e3 = thrown(StrictModeWiringException) + e3.message == "The type Foo already has a enum provider defined" + + when: + RuntimeWiring.newRuntimeWiring() + .strictMode() + .scalar(Scalars.GraphQLString) + then: + def e4 = thrown(StrictModeWiringException) + e4.message == "The scalar String is already defined" + + } } diff --git a/src/test/groovy/graphql/schema/idl/SchemaGeneratorTest.groovy b/src/test/groovy/graphql/schema/idl/SchemaGeneratorTest.groovy index 0d2be323d0..d8895370c6 100644 --- a/src/test/groovy/graphql/schema/idl/SchemaGeneratorTest.groovy +++ b/src/test/groovy/graphql/schema/idl/SchemaGeneratorTest.groovy @@ -1,6 +1,5 @@ package graphql.schema.idl - import graphql.TestUtil 
import graphql.introspection.Introspection import graphql.language.Node @@ -9,9 +8,7 @@ import graphql.schema.DataFetcherFactory import graphql.schema.DataFetcherFactoryEnvironment import graphql.schema.DataFetchingEnvironment import graphql.schema.GraphQLAppliedDirective -import graphql.schema.GraphQLArgument import graphql.schema.GraphQLCodeRegistry -import graphql.schema.GraphQLDirective import graphql.schema.GraphQLDirectiveContainer import graphql.schema.GraphQLEnumType import graphql.schema.GraphQLFieldDefinition @@ -1820,32 +1817,6 @@ class SchemaGeneratorTest extends Specification { assert schema != null } - def "transformers get called once the schema is built"() { - def spec = """ - type Query { - hello: String - } - """ - - def types = new SchemaParser().parse(spec) - - def extraDirective = (GraphQLDirective.newDirective()).name("extra") - .argument(GraphQLArgument.newArgument().name("value").type(GraphQLString)).build() - def transformer = new SchemaGeneratorPostProcessing() { // Retained to show deprecated code is still run - @Override - GraphQLSchema process(GraphQLSchema originalSchema) { - originalSchema.transform({ builder -> builder.additionalDirective(extraDirective) }) - } - } - def wiring = RuntimeWiring.newRuntimeWiring() - .transformer(transformer) // Retained to show deprecated code is still run - .build() - GraphQLSchema schema = new SchemaGenerator().makeExecutableSchema(types, wiring) - expect: - assert schema != null - schema.getDirective("extra") != null - } - def "enum object default values are handled"() { def spec = ''' enum EnumValue { diff --git a/src/test/groovy/graphql/schema/idl/SchemaPrinterTest.groovy b/src/test/groovy/graphql/schema/idl/SchemaPrinterTest.groovy index c0c83ddca8..1cff3fffec 100644 --- a/src/test/groovy/graphql/schema/idl/SchemaPrinterTest.groovy +++ b/src/test/groovy/graphql/schema/idl/SchemaPrinterTest.groovy @@ -1556,7 +1556,7 @@ extend type Query { ''' } - def "@deprecated directives are always printed"() { 
+ def "@deprecated directives are NOT always printed - they used to be"() { given: def idl = """ @@ -1588,7 +1588,7 @@ extend type Query { then: result == '''type Field { - deprecated: Enum @deprecated(reason : "No longer supported") + deprecated: Enum } type Query { @@ -1596,11 +1596,11 @@ type Query { } enum Enum { - enumVal @deprecated(reason : "No longer supported") + enumVal } input Input { - deprecated: String @deprecated(reason : "custom reason") + deprecated: String } ''' } @@ -1641,7 +1641,7 @@ type Query { ''' } - def "@deprecated directive are always printed regardless of options"() { + def "@deprecated directive are NOT always printed regardless of options"() { given: def idl = ''' @@ -1660,6 +1660,37 @@ type Query { then: result == '''type Query { + fieldX: String +} +''' + } + + def "@deprecated directive are printed respecting options"() { + given: + def idl = ''' + + type Query { + fieldX : String @deprecated + } + + ''' + def registry = new SchemaParser().parse(idl) + def runtimeWiring = newRuntimeWiring().build() + def options = SchemaGenerator.Options.defaultOptions() + def schema = new SchemaGenerator().makeExecutableSchema(options, registry, runtimeWiring) + + when: + def printOptions = defaultOptions().includeDirectives({ dName -> (dName == "deprecated") }) + def result = new SchemaPrinter(printOptions).print(schema) + + then: + result == '''"Marks the field, argument, input field or enum value as deprecated" +directive @deprecated( + "The reason for the deprecation" + reason: String = "No longer supported" + ) on FIELD_DEFINITION | ARGUMENT_DEFINITION | ENUM_VALUE | INPUT_FIELD_DEFINITION + +type Query { fieldX: String @deprecated(reason : "No longer supported") } ''' @@ -2678,7 +2709,10 @@ input Gun { .query(queryType) .build() when: - def result = "\n" + new SchemaPrinter(noDirectivesOption).print(schema) + + def printOptions = defaultOptions().includeDirectiveDefinitions(false).includeDirectives({ d -> true }) + + def result = "\n" + new 
SchemaPrinter(printOptions).print(schema) println(result) then: @@ -2698,6 +2732,51 @@ enum Enum { input Input { deprecatedWithReason: Enum @deprecated(reason : "Custom input reason") } +""" + } + + def "can use predicate for directive definitions"() { + + def schema = TestUtil.schema(""" + type Query { + field: String @deprecated + } + """) + + + def options = defaultOptions() + .includeDirectiveDefinitions(true) + .includeDirectiveDefinition({ it != "skip" }) + def result = new SchemaPrinter(options).print(schema) + + expect: "has no skip definition" + + result == """"Marks the field, argument, input field or enum value as deprecated" +directive @deprecated( + "The reason for the deprecation" + reason: String = "No longer supported" + ) on FIELD_DEFINITION | ARGUMENT_DEFINITION | ENUM_VALUE | INPUT_FIELD_DEFINITION + +"Directs the executor to include this field or fragment only when the `if` argument is true" +directive @include( + "Included when true." + if: Boolean! + ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +"Indicates an Input Object is a OneOf Input Object." +directive @oneOf on INPUT_OBJECT + +"Exposes a URL that specifies the behaviour of this scalar." +directive @specifiedBy( + "The URL that specifies the behaviour of this scalar." + url: String! 
+ ) on SCALAR + +type Query { + field: String @deprecated(reason : "No longer supported") +} """ } } + + diff --git a/src/test/groovy/graphql/schema/idl/TypeRuntimeWiringTest.groovy b/src/test/groovy/graphql/schema/idl/TypeRuntimeWiringTest.groovy new file mode 100644 index 0000000000..2a833ace22 --- /dev/null +++ b/src/test/groovy/graphql/schema/idl/TypeRuntimeWiringTest.groovy @@ -0,0 +1,88 @@ +package graphql.schema.idl + +import graphql.schema.DataFetcher +import graphql.schema.idl.errors.StrictModeWiringException +import spock.lang.Specification + +class TypeRuntimeWiringTest extends Specification { + + void setup() { + TypeRuntimeWiring.setStrictModeJvmWide(false) + } + + void cleanup() { + TypeRuntimeWiring.setStrictModeJvmWide(false) + } + + DataFetcher DF1 = env -> "x" + DataFetcher DF2 = env -> "y" + + def "strict mode is off by default"() { + when: + def typeRuntimeWiring = TypeRuntimeWiring.newTypeWiring("Foo") + .dataFetcher("foo", DF1) + .dataFetcher("foo", DF2) + .build() + then: + typeRuntimeWiring.getFieldDataFetchers().get("foo") == DF2 + } + + def "strict mode can be turned on"() { + when: + TypeRuntimeWiring.newTypeWiring("Foo") + .strictMode() + .dataFetcher("foo", DF1) + .dataFetcher("foo", DF2) + .build() + then: + def e = thrown(StrictModeWiringException) + e.message == "The field foo already has a data fetcher defined" + } + + def "strict mode can be turned on for maps of fields"() { + when: + TypeRuntimeWiring.newTypeWiring("Foo") + .strictMode() + .dataFetcher("foo", DF1) + .dataFetchers(["foo": DF2]) + .build() + then: + def e = thrown(StrictModeWiringException) + e.message == "The field foo already has a data fetcher defined" + } + + def "strict mode can be turned on JVM wide"() { + + + when: + def inStrictMode = TypeRuntimeWiring.getStrictModeJvmWide() + then: + !inStrictMode + + + when: + TypeRuntimeWiring.setStrictModeJvmWide(true) + inStrictMode = TypeRuntimeWiring.getStrictModeJvmWide() + + TypeRuntimeWiring.newTypeWiring("Foo") + 
.dataFetcher("foo", DF1) + .dataFetcher("foo", DF2) + .build() + then: + inStrictMode + def e = thrown(StrictModeWiringException) + e.message == "The field foo already has a data fetcher defined" + + when: + TypeRuntimeWiring.setStrictModeJvmWide(false) + inStrictMode = TypeRuntimeWiring.getStrictModeJvmWide() + + TypeRuntimeWiring.newTypeWiring("Foo") + .dataFetcher("foo", DF1) + .dataFetcher("foo", DF2) + .build() + then: + !inStrictMode + noExceptionThrown() + } +} diff --git a/src/test/groovy/graphql/schema/visibility/GraphqlFieldVisibilityTest.groovy b/src/test/groovy/graphql/schema/visibility/GraphqlFieldVisibilityTest.groovy index 9f3e083977..e292cffb9a 100644 --- a/src/test/groovy/graphql/schema/visibility/GraphqlFieldVisibilityTest.groovy +++ b/src/test/groovy/graphql/schema/visibility/GraphqlFieldVisibilityTest.groovy @@ -26,37 +26,6 @@ import static graphql.schema.visibility.NoIntrospectionGraphqlFieldVisibility.NO class GraphqlFieldVisibilityTest extends Specification { - def "visibility is enforced"() { - GraphqlFieldVisibility banNameVisibility = newBlock().addPattern(".*\\.name").build() - def schema = GraphQLSchema.newSchema() - .query(StarWarsSchema.queryType) - .codeRegistry(StarWarsSchema.codeRegistry) - .fieldVisibility(banNameVisibility) // Retain deprecated builder for test coverage - .build() - - def graphQL = GraphQL.newGraphQL(schema).build() - - given: - def query = """ - { - hero { - id - name - friends { - aliasHandled: name - } - } - } - """ - - when: - def result = graphQL.execute(query) - - then: - result.errors[0].getMessage().contains("Field 'name' in type 'Character' is undefined") - result.errors[1].getMessage().contains("Field 'name' in type 'Character' is undefined") - } - def "introspection visibility is enforced"() { given: GraphQLCodeRegistry codeRegistry = StarWarsSchema.codeRegistry.transform(builder -> { diff --git a/src/test/groovy/graphql/util/AnonymizerTest.groovy b/src/test/groovy/graphql/util/AnonymizerTest.groovy 
index d1bf02e20a..439132eda3 100644 --- a/src/test/groovy/graphql/util/AnonymizerTest.groovy +++ b/src/test/groovy/graphql/util/AnonymizerTest.groovy @@ -718,7 +718,7 @@ type Object1 { when: def result = Anonymizer.anonymizeSchema(schema) def newSchema = new SchemaPrinter(SchemaPrinter.Options.defaultOptions() - .includeDirectives({!DirectiveInfo.isGraphqlSpecifiedDirective(it)})) + .includeDirectives({!DirectiveInfo.isGraphqlSpecifiedDirective(it) || it == "deprecated"})) .print(result) then: @@ -729,6 +729,8 @@ type Object1 { directive @Directive1(argument1: String! = "stringValue4") repeatable on SCHEMA | SCALAR | OBJECT | FIELD_DEFINITION | ARGUMENT_DEFINITION | INTERFACE | UNION | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + directive @deprecated(reason: String) on FIELD_DEFINITION | ARGUMENT_DEFINITION | ENUM_VALUE | INPUT_FIELD_DEFINITION + interface Interface1 @Directive1(argument1 : "stringValue12") { field2: String field3: Enum1 diff --git a/src/test/groovy/graphql/util/CyclicSchemaAnalyzerTest.groovy b/src/test/groovy/graphql/util/CyclicSchemaAnalyzerTest.groovy new file mode 100644 index 0000000000..d24c2289c6 --- /dev/null +++ b/src/test/groovy/graphql/util/CyclicSchemaAnalyzerTest.groovy @@ -0,0 +1,311 @@ +package graphql.util + + +import graphql.TestUtil +import spock.lang.Specification + +class CyclicSchemaAnalyzerTest extends Specification { + + def "simple cycle"() { + given: + def sdl = ''' + + type Query { + hello: [Foo] + } + type Foo { + foo: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 1 + cycles[0].toString() == "[Foo.foo, Foo]" + + } + + def "simple cycle with interfaces"() { + given: + def sdl = ''' + + type Query { + hello: [Foo] + } + interface Foo { + foo: Foo + } + type Impl implements Foo { + foo: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + 
cycles.size() == 1 + cycles[0].toString() == "[Foo.foo, Foo]" + + } + + def "input field cycle"() { + given: + def sdl = ''' + type Query { + hello(i: I): String + } + input I { + foo: I + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 1 + cycles[0].toString() == "[I.foo, I]" + + } + + def "multiple cycles"() { + given: + def sdl = ''' + + type Query { + hello: [Foo] + } + type Foo { + bar: Bar + foo: Foo + } + type Bar { + bar: [Bar]! + foo: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 3 + cycles[0].toString() == "[Foo.bar, Bar, Bar.foo, Foo]" + cycles[1].toString() == "[Foo.foo, Foo]" + cycles[2].toString() == "[Bar.bar, Bar]" + + } + + def "larger cycle"() { + given: + def sdl = ''' + + type Query { + hello: [Foo] + } + type Foo { + bar: Bar + } + type Bar { + subBar: SubBar + } + type SubBar { + foo: Foo + } + + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 1 + cycles[0].toString() == "[Foo.bar, Bar, Bar.subBar, SubBar, SubBar.foo, Foo]" + + } + + def "two parents and no cycle"() { + given: + def sdl = ''' + + type Query { + hello: Foo1 + hello2: Foo2 + } + type Foo1 { + bar: Bar + } + type Foo2 { + bar: Bar + } + type Bar { + id: ID + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 0 + + } + + def "cycle test"() { + given: + def sdl = ''' + type Query { + foo: Foo + } + type Foo { + f1: Foo + f2: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 2 + cycles[0].toString() == "[Foo.f1, Foo]" + cycles[1].toString() == "[Foo.f2, Foo]" + + + } + + def "cycle test 2"() { + given: + def sdl = ''' + type Query { + foo: Foo + 
} + type Foo { + f1: Foo + f2: Bar + } + type Bar { + foo: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 2 + cycles[0].toString() == "[Foo.f1, Foo]" + cycles[1].toString() == "[Foo.f2, Bar, Bar.foo, Foo]" + + } + + def "cycle test 3"() { + given: + def sdl = ''' + type Query { + foo: Foo + } + type Foo { + issues: [IssueConnection] + } + type IssueConnection { + edges: [Edge] + nodes: [Issue] + } + type Edge { + node: Issue + } + type Issue { + foo: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + //TODO: should be 2 + cycles.size() == 2 + cycles[0].toString() == "[Foo.issues, IssueConnection, IssueConnection.nodes, Issue, Issue.foo, Foo]" + cycles[1].toString() == "[Foo.issues, IssueConnection, IssueConnection.edges, Edge, Edge.node, Issue, Issue.foo, Foo]" + + } + + def "cycle test 4"() { + given: + def sdl = ''' + type Query { + foo: Foo + } + type Foo { + issues: [IssueConnection] + } + type IssueConnection { + edges: [Edge] + nodes: [Foo] + } + type Edge { + node: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 2 + cycles[0].toString() == "[Foo.issues, IssueConnection, IssueConnection.nodes, Foo]" + cycles[1].toString() == "[Foo.issues, IssueConnection, IssueConnection.edges, Edge, Edge.node, Foo]" + + } + + def "cycle with Union"() { + given: + def sdl = ''' + type Query { + foo: Foo + } + union Foo = Bar | Baz + type Bar { + bar: Foo + } + type Baz { + bar: Foo + } + ''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema) + + then: + cycles.size() == 2 + cycles[0].toString() == "[Foo, Baz, Baz.bar]" + cycles[1].toString() == "[Foo, Bar, Bar.bar]" + + } + + def "introspection cycles "() { + given: + def sdl = ''' + type Query { + hello: String + } + 
''' + def schema = TestUtil.schema(sdl) + when: + def cycles = CyclicSchemaAnalyzer.findCycles(schema, false) + + then: + cycles.size() == 6 + cycles[0].toString() == "[__Type.fields, __Field, __Field.type, __Type]" + cycles[1].toString() == "[__Type.fields, __Field, __Field.args, __InputValue, __InputValue.type, __Type]" + cycles[2].toString() == "[__Type.interfaces, __Type]" + cycles[3].toString() == "[__Type.possibleTypes, __Type]" + cycles[4].toString() == "[__Type.inputFields, __InputValue, __InputValue.type, __Type]" + cycles[5].toString() == "[__Type.ofType, __Type]" + + } +} diff --git a/src/test/groovy/graphql/util/IdGeneratorTest.groovy b/src/test/groovy/graphql/util/IdGeneratorTest.groovy new file mode 100644 index 0000000000..99a9344012 --- /dev/null +++ b/src/test/groovy/graphql/util/IdGeneratorTest.groovy @@ -0,0 +1,17 @@ +package graphql.util + +import spock.lang.Specification + +class IdGeneratorTest extends Specification { + def "can generate uuids"() { + when: + def set = new HashSet() + for (int i = 0; i < 1000; i++) { + set.add(IdGenerator.uuid().toString()); + } + + then: + // should this fail - the universe has ended and has retracted back into the singularity + set.size() == 1000 + } +} diff --git a/src/test/groovy/graphql/util/LogKitTest.groovy b/src/test/groovy/graphql/util/LogKitTest.groovy deleted file mode 100644 index eeb1582baa..0000000000 --- a/src/test/groovy/graphql/util/LogKitTest.groovy +++ /dev/null @@ -1,14 +0,0 @@ -package graphql.util - - -import spock.lang.Specification - -class LogKitTest extends Specification { - - def "logger has a prefixed name"() { - when: - def logger = LogKit.getNotPrivacySafeLogger(LogKitTest.class) - then: - logger.getName() == "notprivacysafe.graphql.util.LogKitTest" - } -} diff --git a/src/test/groovy/graphql/util/StringKitTest.groovy b/src/test/groovy/graphql/util/StringKitTest.groovy new file mode 100644 index 0000000000..a2a428bce6 --- /dev/null +++ 
b/src/test/groovy/graphql/util/StringKitTest.groovy @@ -0,0 +1,22 @@ +package graphql.util + +import spock.lang.Specification + +class StringKitTest extends Specification { + + + def "can capitalise"() { + expect: + + def actual = StringKit.capitalize(input) + actual == expected + + where: + input | expected + null | null + "" | "" + "a" | "A" + "abc" | "Abc" + + } +} diff --git a/src/test/groovy/graphql/validation/SpecValidationSchema.java b/src/test/groovy/graphql/validation/SpecValidationSchema.java index 45a6b2637f..060a2399cd 100644 --- a/src/test/groovy/graphql/validation/SpecValidationSchema.java +++ b/src/test/groovy/graphql/validation/SpecValidationSchema.java @@ -30,7 +30,10 @@ import static graphql.introspection.Introspection.DirectiveLocation.QUERY; import static graphql.schema.GraphQLArgument.newArgument; import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition; +import static graphql.schema.GraphQLInputObjectField.newInputObjectField; +import static graphql.schema.GraphQLInputObjectType.newInputObject; import static graphql.schema.GraphQLNonNull.nonNull; +import static graphql.schema.GraphqlTypeComparatorRegistry.BY_NAME_REGISTRY; import static java.util.Collections.singletonList; /** @@ -215,6 +218,25 @@ public class SpecValidationSchema { .field(newFieldDefinition().name("cat").type(cat)) .build(); + public static GraphQLInputObjectType inputDogType = newInputObject() + .name("DogInput") + .description("Input for A Dog creation.") + .field(newInputObjectField() + .name("id") + .description("The id of the dog.") + .type(nonNull(GraphQLString))) + .build(); + + public static final GraphQLObjectType petMutationType = GraphQLObjectType.newObject() + .name("PetMutationType") + .field(newFieldDefinition() + .name("createDog") + .type(dog) + .argument(newArgument() + .name("input") + .type(inputDogType))) + .build(); + public static final Set specValidationDictionary = new HashSet() {{ add(dogCommand); add(catCommand); @@ -275,11 +297,13 @@ 
public class SpecValidationSchema { .query(queryRoot) .codeRegistry(codeRegistry) .subscription(subscriptionRoot) + .mutation(petMutationType) .additionalDirective(upperDirective) .additionalDirective(lowerDirective) .additionalDirective(dogDirective) .additionalDirective(nonNullDirective) .additionalDirective(objectArgumentDirective) + .additionalDirective(Directives.DeferDirective) .additionalTypes(specValidationDictionary) .build(); diff --git a/src/test/groovy/graphql/validation/rules/DeferDirectiveLabelTest.groovy b/src/test/groovy/graphql/validation/rules/DeferDirectiveLabelTest.groovy new file mode 100644 index 0000000000..2e3975269d --- /dev/null +++ b/src/test/groovy/graphql/validation/rules/DeferDirectiveLabelTest.groovy @@ -0,0 +1,213 @@ +package graphql.validation.rules + +import graphql.ExperimentalApi +import graphql.GraphQLContext +import graphql.language.Document +import graphql.parser.Parser +import graphql.validation.LanguageTraversal +import graphql.validation.RulesVisitor +import graphql.validation.SpecValidationSchema +import graphql.validation.TraversalContext +import graphql.validation.ValidationContext +import graphql.validation.ValidationError +import graphql.validation.ValidationErrorCollector +import graphql.validation.ValidationErrorType +import graphql.validation.Validator +import spock.lang.Specification + +class DeferDirectiveLabelTest extends Specification { + + ValidationContext validationContext = Mock(ValidationContext) + + ValidationErrorCollector errorCollector = new ValidationErrorCollector() + + DeferDirectiveLabel deferDirectiveLabel = new DeferDirectiveLabel(validationContext, errorCollector) + + def setup() { + def traversalContext = Mock(TraversalContext) + validationContext.getSchema() >> SpecValidationSchema.specValidationSchema + validationContext.getGraphQLContext() >> GraphQLContext.newContext().of( + ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT, true + ).build(); + validationContext.getTraversalContext() >> 
traversalContext + } + + def "Allow unique label directive"() { + given: + def query = """ + query defer_query { + ... @defer(label: "name") { + human { + name + } + } + } + """ + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + languageTraversal.traverse(document, new RulesVisitor(validationContext, [deferDirectiveLabel])) + + then: + errorCollector.errors.isEmpty() + + } + + def "Defer directive label argument must be unique"() { + given: + def query = """ + query defer_query { + dog { + ... @defer(label: "name") { + name + } + } + alien { + ... @defer(label: "name") { + name + } + } + + } + """ + + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + languageTraversal.traverse(document, new RulesVisitor(validationContext, [deferDirectiveLabel])) + + then: + !errorCollector.errors.isEmpty() + errorCollector.containsValidationError(ValidationErrorType.DuplicateIncrementalLabel) + } + + def "Multiple use of Defer directive is valid"() { + given: + def query = """ + query defer_query { + dog { + ... @defer { + name + } + ... @defer { + name + } + } + } + """ + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + languageTraversal.traverse(document, new RulesVisitor(validationContext, [deferDirectiveLabel])) + + then: + errorCollector.errors.isEmpty() + } + + def "Allow Multiple use of Defer directive with different labels"() { + given: + def query = """ + query defer_query { + dog { + ... @defer(label: "name") { + name + } + ... 
@defer(label: "nameAgain") { + name + } + } + } + """ + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + languageTraversal.traverse(document, new RulesVisitor(validationContext, [deferDirectiveLabel])) + + then: + errorCollector.errors.isEmpty() + } + + + def "Label cannot be an argument directive"() { + given: + def query = """ + query defer_query(\$label: Int) { + ... @defer(label:\$label) { + human { + name + } + } + } + """ + + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + languageTraversal.traverse(document, new RulesVisitor(validationContext, [deferDirectiveLabel])) + + then: + !errorCollector.errors.isEmpty() + errorCollector.containsValidationError(ValidationErrorType.WrongType) + } + + + def "Defer directive Label must be string"() { + given: + def query = """ + query defer_query { + dog { + ... @defer(label: 1) { + name + } + } + } + """ + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + languageTraversal.traverse(document, new RulesVisitor(validationContext, [deferDirectiveLabel])) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.WrongType) + } + + def "defer with null label should behave as if no label was provided"() { + def query = ''' + query { + dog { + ... @defer(label: null) { + name + } + } + cat { + ... 
@defer(label: null) { + name + } + } + } + ''' + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + languageTraversal.traverse(document, new RulesVisitor(validationContext, [deferDirectiveLabel])) + + then: + errorCollector.errors.isEmpty() + } + + + static List validate(String query) { + def document = new Parser().parseDocument(query) + return new Validator().validateDocument(SpecValidationSchema.specValidationSchema, document, Locale.ENGLISH) + } +} + diff --git a/src/test/groovy/graphql/validation/rules/DeferDirectiveOnRootLevelTest.groovy b/src/test/groovy/graphql/validation/rules/DeferDirectiveOnRootLevelTest.groovy new file mode 100644 index 0000000000..ec65ec1937 --- /dev/null +++ b/src/test/groovy/graphql/validation/rules/DeferDirectiveOnRootLevelTest.groovy @@ -0,0 +1,473 @@ +package graphql.validation.rules + +import graphql.ExperimentalApi +import graphql.i18n.I18n +import graphql.language.Document +import graphql.parser.Parser +import graphql.validation.LanguageTraversal +import graphql.validation.RulesVisitor +import graphql.validation.SpecValidationSchema +import graphql.validation.ValidationContext +import graphql.validation.ValidationErrorCollector +import graphql.validation.ValidationErrorType +import spock.lang.Specification + +class DeferDirectiveOnRootLevelTest extends Specification { + + ValidationErrorCollector errorCollector = new ValidationErrorCollector() + + def traverse(String query) { + Document document = new Parser().parseDocument(query) + ValidationContext validationContext = new ValidationContext( + SpecValidationSchema.specValidationSchema, + document, + I18n.i18n(I18n.BundleType.Validation, Locale.ENGLISH)) + validationContext.getGraphQLContext().put(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT, true) + + LanguageTraversal languageTraversal = new LanguageTraversal() + languageTraversal.traverse(document, new RulesVisitor(validationContext, [new 
DeferDirectiveOnRootLevel(validationContext, errorCollector)])) + } + + + def "Not allow defer on subscription root level"() { + given: + def query = """ + subscription pets { + ... @defer { + dog { + name + } + } + } + """ + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + + } + + def "Not allow defer mutation root level "() { + given: + def query = """ + mutation dog { + ... @defer { + createDog(input: {id: "1"}) { + name + } + } + } + """ + + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective) : Defer directive cannot be used on root mutation type 'PetMutationType'" + + } + + def "Defer directive is allowed on query root level"() { + given: + def query = """ + query defer_query { + ... @defer { + dog { + name + } + } + } + """ + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + } + + def "Not allow defer mutation root level on inline fragments "() { + given: + def query = """ + mutation doggo { + ... { + ... @defer { + createDog(input: {id: "1"}) { + name + } + } + + } + } + """ + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective) : Defer directive cannot be used on root mutation type 'PetMutationType'" + } + + def "Not allow defer on subscription root level even when is inside multiple inline fragment"() { + given: + def query = """ + subscription pets { + ...{ + ...{ + ... 
@defer { + dog { + name + } + } + } + } + } + """ + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective) : Defer directive cannot be used on root subscription type 'SubscriptionRoot'" + + } + + + def "Not allow defer on mutation root level even when ih multiple inline fragments split in fragment"() { + given: + def query = """ + fragment doggo on PetMutationType { + ... { + ... @defer { + createDog(id: "1") { + id + } + } + } + } + + mutation doggoMutation { + ...{ + ...doggo + } + } + + + """ + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective@[doggo]) : Defer directive cannot be used on root mutation type 'PetMutationType'" + } + + + def "Allows defer on mutation when it is not on root level"() { + given: + def query = """ + mutation pets { + createDog(input: {id: "1"}) { + ... @defer { + name + } + } + } + """ + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + } + + def "allow defer on fragment when is not on mutation root level"() { + given: + def query = """ + mutation doggo { + ...{ + createDog(id: "1") { + ...doggo + } + } + } + + fragment doggo on Dog { + ... 
@defer { + id + } + } + + """ + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + } + + + def "allow defer on split fragment when is not on mutation root level"() { + given: + def query = """ + mutation doggo { + ...doggoCreate + } + + fragment doggoCreate on PetMutationType { + createDog(id: "1") { + ...doggoFields + } + } + + fragment doggoFields on Dog { + ... @defer { + id + } + } + + """ + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + + } + + + def "Not allow defer subscription root level even when there are multiple subscriptions"() { + given: + def query = """ + subscription pets { + dog { + name + } + } + subscription dog { + ... @defer { + dog { + name + } + } + } + + subscription morePets { + cat { + name + } + } + """ + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal() + + when: + traverse(query) + + then: + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.size() == 1 + + } + + def "Not allow defer on mutation root level when there are multiple fragment levels regarless fragment order on query"() { + given: + def query = """ + + fragment createDoggoRoot on PetMutationType { + ... { + ...createDoggo + } + } + + mutation createDoggoRootOp { + ...createDoggoRoot + } + + fragment createDoggo on PetMutationType { + ... { + ... 
@defer { + createDog(input: {id: "1"}) { + name + } + } + } + } + + """ + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective@[createDoggoRoot/createDoggo]) : Defer directive cannot be used on root mutation type 'PetMutationType'" + + } + + def "Not allow defer on mutation root level even when there are multiple fragments and operations"() { + given: + def query = """ + + fragment createDoggoLevel1 on PetMutationType { + ... { + ... { + ...createDoggoLevel2 + } + } + } + + fragment createDoggoLevel2 on PetMutationType { + ...createDoggo + } + + fragment createDoggo on PetMutationType { + ... { + ... @defer { + createDog(input: {id: "1"}) { + name + } + } + } + } + + query pets1 { + ... @defer { + dog { + name + } + } + } + + mutation createDoggo { + ...createDoggoLevel1 + } + + """ + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective@[createDoggoLevel1/createDoggoLevel2/createDoggo]) : Defer directive cannot be used on root mutation type 'PetMutationType'" + + } + + + def "Not allow defer on subscription root level even when defer(if == false) "() { + given: + def query = """ + subscription pets{ + ... 
@defer(if:false) { + dog { + + name + } + nickname + } + } + """ + Document document = new Parser().parseDocument(query) + LanguageTraversal languageTraversal = new LanguageTraversal()\ + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective) : Defer directive cannot be used on root subscription type 'SubscriptionRoot'" + + } + + def "Not allow defer on subscription root level when defer(if == true) "() { + given: + def query = """ + subscription pets{ + ... @defer(if:true) { + dog { + + name + } + nickname + } + } + """ + + when: + traverse(query) + + then: + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective) : Defer directive cannot be used on root subscription type 'SubscriptionRoot'" + + } + + def "Not allow defer on mutation root level even when if is variable that could have false as value "() { + given: + def query = """ + mutation pets(\$ifVar:Boolean){ + ... @defer(if:\$ifVar) { + createDog(input: {id: "1"}) { + name + } + } + + } + """ + + when: + traverse(query) + + then: + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective) : Defer directive cannot be used on root mutation type 'PetMutationType'" + } + + def "Not allow defer on mutation root level when defer(if == true) "() { + given: + def query = """ + mutation pets{ + ... 
@defer(if:true) { + createDog(input: {id: "1"}) { + name + } + } + } + """ + + when: + traverse(query) + + then: + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective) : Defer directive cannot be used on root mutation type 'PetMutationType'" + + } + +} + diff --git a/src/test/groovy/graphql/validation/rules/DeferDirectiveOnValidOperationTest.groovy b/src/test/groovy/graphql/validation/rules/DeferDirectiveOnValidOperationTest.groovy new file mode 100644 index 0000000000..1430b7743b --- /dev/null +++ b/src/test/groovy/graphql/validation/rules/DeferDirectiveOnValidOperationTest.groovy @@ -0,0 +1,366 @@ +package graphql.validation.rules + +import graphql.ExperimentalApi +import graphql.i18n.I18n +import graphql.language.Document +import graphql.parser.Parser +import graphql.validation.LanguageTraversal +import graphql.validation.RulesVisitor +import graphql.validation.SpecValidationSchema +import graphql.validation.ValidationContext +import graphql.validation.ValidationErrorCollector +import graphql.validation.ValidationErrorType +import spock.lang.Specification + +class DeferDirectiveOnValidOperationTest extends Specification { + ValidationErrorCollector errorCollector = new ValidationErrorCollector() + + def traverse(String query) { + Document document = new Parser().parseDocument(query) + I18n i18n = I18n.i18n(I18n.BundleType.Validation, Locale.ENGLISH) + ValidationContext validationContext = new ValidationContext(SpecValidationSchema.specValidationSchema, document, i18n) + validationContext.getGraphQLContext().put(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT, true) + LanguageTraversal languageTraversal = new LanguageTraversal() + languageTraversal.traverse(document, new RulesVisitor(validationContext, [new DeferDirectiveOnValidOperation(validationContext, errorCollector)])) + } + + def "Allow simple defer on query with 
fragment definition"() { + def query = ''' + query { + dog { + ... DogFields @defer + } + } + + fragment DogFields on Dog { + name + } + ''' + + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + } + + def "Allow simple defer on mutation with fragment definition"() { + def query = ''' + mutation { + createDog(input: {name: "Fido"}) { + ... DogFields @defer + } + } + + fragment DogFields on Dog { + name + } + ''' + + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + } + + def "Not allow defer on subscription operation"() { + given: + def query = """ + subscription pets { + dog { + ... @defer { + name + } + nickname + } + } + """ + + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + + } + + + def "Allow defer(if:false) on subscription operation"() { + given: + def query = """ + subscription pets { + dog { + ... @defer(if:false) { + name + } + nickname + } + } + """ + + + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + + } + + def "Not allow simple defer on subscription with fragment definition"() { + def query = ''' + subscription { + dog { + ... DogFields @defer + } + } + + fragment DogFields on Dog { + name + } + ''' + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + + } + + def "Not allow defer on fragment when operation is subscription"() { + given: + def query = """ + fragment doggo on PetMutationType { + ... { + dog { + ... 
@defer { + id + } + nickname + } + + } + } + + subscription doggoMutation { + ...{ + ...doggo + } + } + + + """ + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + + } + + def "Allow defer(if:false) on fragment when operation is subscription"() { + given: + def query = """ + fragment doggo on PetMutationType { + ... { + dog { + ... @defer(if:false) { + id + } + nickname + } + + } + } + + subscription doggoMutation { + ...{ + ...doggo + } + } + + + """ + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + + } + + def "Not allow defer subscription even when there are multiple operations with multiple fragments"() { + given: + def query = """ + + fragment doggoSubscription on SubscriptionRoot { + ... { + dog { + ...doggo + } + } + } + + query pets { + ... @defer { + dog { + name + } + } + } + + subscription pets2 { + ...doggoSubscription + } + + query pets3 { + dog { + name + } + } + + fragment doggo on Dog{ + ... @defer { + name + } + } + """ + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.errors.get(0).getValidationErrorType() == ValidationErrorType.MisplacedDirective + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective@[doggoSubscription/dog/doggo]) : Directive 'defer' is not allowed to be used on operation subscription" + + } + + + def "Not allow defer subscription even when there are multiple operations and multiple fragments"() { + given: + def query = """ + query pets { + ... @defer { + dog { + name + } + } + } + + subscription pets2 { + dog { + ... 
@defer { + name + } + } + } + + + """ + + when: + traverse(query) + + then: + !errorCollector.errors.isEmpty() + errorCollector.errors.size() == 1 + errorCollector.errors.get(0).getValidationErrorType() == ValidationErrorType.MisplacedDirective + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective@[dog]) : Directive 'defer' is not allowed to be used on operation subscription" + + } + + def "Allows defer on mutation when it is not on root level"() { + given: + def query = """ + mutation pets { + dog { + ... @defer { + name + } + } + } + """ + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + } + + + def "Allow defer on subscription when defer(if == false) "() { + given: + def query = """ + subscription pets{ + dog { + ... @defer(if:false) { + name + } + nickname + } + } + """ + + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + + } + + def "Not allow defer on subscription when defer(if == true) "() { + given: + def query = """ + subscription pets{ + dog { + ... @defer(if:true) { + name + } + nickname + } + } + """ + + when: + traverse(query) + + then: + errorCollector.errors.size() == 1 + errorCollector.containsValidationError(ValidationErrorType.MisplacedDirective) + errorCollector.errors.get(0).message == "Validation error (MisplacedDirective@[dog]) : Directive 'defer' is not allowed to be used on operation subscription" + + + } + + def "Allow defer when if is variable that could have false as value "() { + given: + def query = """ + subscription pets(\$ifVar:Boolean){ + dog { + ... 
@defer(if:\$ifVar) { + name + } + nickname + } + } + """ + + when: + traverse(query) + + then: + errorCollector.errors.isEmpty() + } + + + +} \ No newline at end of file diff --git a/src/test/groovy/graphql/validation/rules/KnownArgumentNamesTest.groovy b/src/test/groovy/graphql/validation/rules/KnownArgumentNamesTest.groovy index 302f34b7a1..e437b43eda 100644 --- a/src/test/groovy/graphql/validation/rules/KnownArgumentNamesTest.groovy +++ b/src/test/groovy/graphql/validation/rules/KnownArgumentNamesTest.groovy @@ -1,5 +1,6 @@ package graphql.validation.rules +import graphql.introspection.Introspection import graphql.language.Argument import graphql.language.BooleanValue import graphql.language.StringValue @@ -52,7 +53,9 @@ class KnownArgumentNamesTest extends Specification { given: Argument argument = Argument.newArgument("unknownArg", BooleanValue.newBooleanValue(true).build()).build() def fieldDefinition = GraphQLFieldDefinition.newFieldDefinition().name("field").type(GraphQLString).build() - def directiveDefinition = GraphQLDirective.newDirective().name("directive") + def directiveDefinition = GraphQLDirective.newDirective() + .name("directive") + .validLocation(Introspection.DirectiveLocation.FIELD_DEFINITION) .argument(GraphQLArgument.newArgument().name("knownArg").type(GraphQLBoolean).build()).build() validationContext.getFieldDef() >> fieldDefinition validationContext.getDirective() >> directiveDefinition @@ -66,7 +69,9 @@ class KnownArgumentNamesTest extends Specification { given: Argument argument = Argument.newArgument("knownArg", BooleanValue.newBooleanValue(true).build()).build() def fieldDefinition = GraphQLFieldDefinition.newFieldDefinition().name("field").type(GraphQLString).build() - def directiveDefinition = GraphQLDirective.newDirective().name("directive") + def directiveDefinition = GraphQLDirective.newDirective() + .name("directive") + .validLocation(Introspection.DirectiveLocation.FIELD_DEFINITION) 
.argument(GraphQLArgument.newArgument().name("knownArg").type(GraphQLBoolean).build()).build() validationContext.getFieldDef() >> fieldDefinition validationContext.getDirective() >> directiveDefinition @@ -81,7 +86,9 @@ class KnownArgumentNamesTest extends Specification { Argument argument = Argument.newArgument("unknownArg", BooleanValue.newBooleanValue(true).build()).build() def fieldDefinition = GraphQLFieldDefinition.newFieldDefinition().name("field").type(GraphQLString) .argument(GraphQLArgument.newArgument().name("unknownArg").type(GraphQLString).build()).build() - def directiveDefinition = GraphQLDirective.newDirective().name("directive") + def directiveDefinition = GraphQLDirective.newDirective() + .name("directive") + .validLocation(Introspection.DirectiveLocation.FIELD_DEFINITION) .argument(GraphQLArgument.newArgument().name("knownArg").type(GraphQLBoolean).build()).build() validationContext.getFieldDef() >> fieldDefinition validationContext.getDirective() >> directiveDefinition diff --git a/src/test/groovy/graphql/validation/rules/ProvidedNonNullArgumentsTest.groovy b/src/test/groovy/graphql/validation/rules/ProvidedNonNullArgumentsTest.groovy index 925f6603eb..9a11066973 100644 --- a/src/test/groovy/graphql/validation/rules/ProvidedNonNullArgumentsTest.groovy +++ b/src/test/groovy/graphql/validation/rules/ProvidedNonNullArgumentsTest.groovy @@ -1,5 +1,6 @@ package graphql.validation.rules +import graphql.introspection.Introspection import graphql.language.Argument import graphql.language.Directive import graphql.language.Field @@ -111,6 +112,7 @@ class ProvidedNonNullArgumentsTest extends Specification { .name("arg").type(GraphQLNonNull.nonNull(GraphQLString)) def graphQLDirective = GraphQLDirective.newDirective() .name("directive") + .validLocation(Introspection.DirectiveLocation.SCALAR) .argument(directiveArg) .build() validationContext.getDirective() >> graphQLDirective @@ -149,6 +151,7 @@ class ProvidedNonNullArgumentsTest extends Specification { 
.defaultValueProgrammatic("defaultVal") def graphQLDirective = GraphQLDirective.newDirective() .name("directive") + .validLocation(Introspection.DirectiveLocation.SCALAR) .argument(directiveArg) .build() validationContext.getDirective() >> graphQLDirective @@ -167,6 +170,7 @@ class ProvidedNonNullArgumentsTest extends Specification { def directiveArg = GraphQLArgument.newArgument().name("arg").type(GraphQLNonNull.nonNull(GraphQLString)) def graphQLDirective = GraphQLDirective.newDirective() .name("directive") + .validLocation(Introspection.DirectiveLocation.SCALAR) .argument(directiveArg) .build() validationContext.getDirective() >> graphQLDirective diff --git a/src/test/groovy/graphql/validation/rules/SubscriptionUniqueRootFieldTest.groovy b/src/test/groovy/graphql/validation/rules/SubscriptionUniqueRootFieldTest.groovy index 1e2d72756f..9b171f2256 100644 --- a/src/test/groovy/graphql/validation/rules/SubscriptionUniqueRootFieldTest.groovy +++ b/src/test/groovy/graphql/validation/rules/SubscriptionUniqueRootFieldTest.groovy @@ -91,7 +91,7 @@ class SubscriptionUniqueRootFieldTest extends Specification { !validationErrors.empty validationErrors.size() == 1 validationErrors[0].validationErrorType == ValidationErrorType.SubscriptionMultipleRootFields - validationErrors[0].message == "Validation error (SubscriptionMultipleRootFields) : Subscription operation 'whoIsAGoodBoy' must have exactly one root field with fragments" + validationErrors[0].message == "Validation error (SubscriptionMultipleRootFields) : Subscription operation 'whoIsAGoodBoy' must have exactly one root field" } def "5.2.3.1 document can contain multiple operations with different root fields"() { @@ -151,9 +151,141 @@ class SubscriptionUniqueRootFieldTest extends Specification { !validationErrors.empty validationErrors.size() == 1 validationErrors[0].validationErrorType == ValidationErrorType.SubscriptionIntrospectionRootField - validationErrors[0].message == "Validation error 
(SubscriptionIntrospectionRootField) : Subscription operation 'doggo' fragment root field '__typename' cannot be an introspection field" + validationErrors[0].message == "Validation error (SubscriptionIntrospectionRootField) : Subscription operation 'doggo' root field '__typename' cannot be an introspection field" + } + + def "5.2.3.1 subscription with multiple root fields within inline fragment are not allowed"() { + given: + def subscriptionOneRootWithFragment = ''' + subscription doggo { + ... { + dog { + name + } + cat { + name + } + } + } + ''' + + when: + def validationErrors = validate(subscriptionOneRootWithFragment) + + then: + !validationErrors.empty + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.SubscriptionMultipleRootFields + validationErrors[0].message == "Validation error (SubscriptionMultipleRootFields) : Subscription operation 'doggo' must have exactly one root field" + } + + + def "5.2.3.1 subscription with more than one root field with multiple fragment fails validation"() { + given: + def subscriptionTwoRootsWithFragment = ''' + fragment doggoRoot on SubscriptionRoot { + ...doggoLevel1 + } + + fragment doggoLevel1 on SubscriptionRoot { + ...doggoLevel2 + } + + fragment doggoLevel2 on SubscriptionRoot { + dog { + name + } + cat { + name + } + } + + subscription whoIsAGoodBoy { + ...doggoRoot + } + ''' + when: + def validationErrors = validate(subscriptionTwoRootsWithFragment) + + then: + !validationErrors.empty + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.SubscriptionMultipleRootFields + validationErrors[0].message == "Validation error (SubscriptionMultipleRootFields) : Subscription operation 'whoIsAGoodBoy' must have exactly one root field" } + + def "5.2.3.1 subscription with more than one root field with multiple fragment with inline fragments fails validation"() { + given: + def subscriptionTwoRootsWithFragment = ''' + fragment doggoRoot on 
SubscriptionRoot { + ...doggoLevel1 + } + + fragment doggoLevel1 on SubscriptionRoot { + ...{ + ...doggoLevel2 + } + } + + fragment doggoLevel2 on SubscriptionRoot { + ...{ + dog { + name + } + cat { + name + } + } + } + + subscription whoIsAGoodBoy { + ...doggoRoot + } + ''' + when: + def validationErrors = validate(subscriptionTwoRootsWithFragment) + + then: + !validationErrors.empty + validationErrors.size() == 1 + validationErrors[0].validationErrorType == ValidationErrorType.SubscriptionMultipleRootFields + validationErrors[0].message == "Validation error (SubscriptionMultipleRootFields) : Subscription operation 'whoIsAGoodBoy' must have exactly one root field" + } + + + def "5.2.3.1 subscription with one root field with multiple fragment with inline fragments does not fail validation"() { + given: + def subscriptionTwoRootsWithFragment = ''' + fragment doggoRoot on SubscriptionRoot { + ...doggoLevel1 + } + + fragment doggoLevel1 on SubscriptionRoot { + ...{ + ...doggoLevel2 + } + } + + fragment doggoLevel2 on SubscriptionRoot { + ...{ + dog { + name + } + + } + } + + subscription whoIsAGoodBoy { + ...doggoRoot + } + ''' + when: + def validationErrors = validate(subscriptionTwoRootsWithFragment) + + then: + validationErrors.empty + } static List validate(String query) { def document = new Parser().parseDocument(query) return new Validator().validateDocument(SpecValidationSchema.specValidationSchema, document, Locale.ENGLISH) diff --git a/src/test/groovy/graphql/validation/rules/UniqueOperationNamesTest.groovy b/src/test/groovy/graphql/validation/rules/UniqueOperationNamesTest.groovy index 1fa03eb24c..c5fa1eb3d2 100644 --- a/src/test/groovy/graphql/validation/rules/UniqueOperationNamesTest.groovy +++ b/src/test/groovy/graphql/validation/rules/UniqueOperationNamesTest.groovy @@ -46,8 +46,8 @@ class UniqueOperationNamesTest extends Specification { } mutation dogOperation { - mutateDog { - id + createDog(input: {id: "1"}) { + name } } """.stripIndent() diff --git 
a/src/test/groovy/readme/DataLoaderBatchingExamples.java b/src/test/groovy/readme/DataLoaderBatchingExamples.java index 8d13a4581f..287d4c5650 100644 --- a/src/test/groovy/readme/DataLoaderBatchingExamples.java +++ b/src/test/groovy/readme/DataLoaderBatchingExamples.java @@ -3,8 +3,6 @@ import graphql.ExecutionInput; import graphql.ExecutionResult; import graphql.GraphQL; -import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation; -import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentationOptions; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import graphql.schema.GraphQLSchema; @@ -84,13 +82,6 @@ public Object get(DataFetchingEnvironment environment) { // as each level of the graphql query is executed and hence make batched objects // available to the query and the associated DataFetchers // - // In this case we use options to make it keep statistics on the batching efficiency - // - DataLoaderDispatcherInstrumentationOptions options = DataLoaderDispatcherInstrumentationOptions - .newOptions().includeStatistics(true); - - DataLoaderDispatcherInstrumentation dispatcherInstrumentation - = new DataLoaderDispatcherInstrumentation(options); // // now build your graphql object and execute queries on it. 
@@ -98,7 +89,6 @@ public Object get(DataFetchingEnvironment environment) { // schema fields // GraphQL graphQL = GraphQL.newGraphQL(buildSchema()) - .instrumentation(dispatcherInstrumentation) .build(); // diff --git a/src/test/groovy/readme/IncrementalExamples.java b/src/test/groovy/readme/IncrementalExamples.java new file mode 100644 index 0000000000..3b7d1ed7be --- /dev/null +++ b/src/test/groovy/readme/IncrementalExamples.java @@ -0,0 +1,102 @@ +package readme; + +import graphql.Directives; +import graphql.ExecutionInput; +import graphql.ExecutionResult; +import graphql.GraphQL; +import graphql.incremental.DelayedIncrementalPartialResult; +import graphql.incremental.IncrementalExecutionResult; +import graphql.schema.GraphQLSchema; +import jakarta.servlet.http.HttpServletResponse; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; + +@SuppressWarnings({"unused", "ConstantConditions"}) +public class IncrementalExamples { + + GraphQLSchema buildSchemaWithDirective() { + + GraphQLSchema schema = buildSchema(); + schema = schema.transform(builder -> + builder.additionalDirective(Directives.DeferDirective) + ); + return schema; + } + + void basicExample(HttpServletResponse httpServletResponse, String deferredQuery) { + GraphQLSchema schema = buildSchemaWithDirective(); + GraphQL graphQL = GraphQL.newGraphQL(schema).build(); + + // + // deferredQuery contains the query with @defer directives in it + // + ExecutionResult initialResult = graphQL.execute(ExecutionInput.newExecutionInput().query(deferredQuery).build()); + + if (!(initialResult instanceof IncrementalExecutionResult)) { + // handle non incremental response + return; + } + + IncrementalExecutionResult incrementalResult = (IncrementalExecutionResult) initialResult; + + // + // then initial results happen first, the incremental ones will begin AFTER these initial + // results have completed + // + sendMultipartHttpResult(httpServletResponse, 
initialResult); + + Publisher delayedIncrementalResults = incrementalResult + .getIncrementalItemPublisher(); + + // + // you subscribe to the incremental results like any other reactive stream + // + delayedIncrementalResults.subscribe(new Subscriber<>() { + + Subscription subscription; + + @Override + public void onSubscribe(Subscription s) { + subscription = s; + // + // how many you request is up to you + subscription.request(10); + } + + @Override + public void onNext(DelayedIncrementalPartialResult executionResult) { + // + // as each deferred result arrives, send it to where it needs to go + // + sendMultipartHttpResult(httpServletResponse, executionResult); + subscription.request(10); + } + + @Override + public void onError(Throwable t) { + handleError(httpServletResponse, t); + } + + @Override + public void onComplete() { + completeResponse(httpServletResponse); + } + }); + } + + private void completeResponse(HttpServletResponse httpServletResponse) { + } + + private void handleError(HttpServletResponse httpServletResponse, Throwable t) { + } + + private void sendMultipartHttpResult(HttpServletResponse httpServletResponse, Object result) { + } + + + private GraphQLSchema buildSchema() { + return null; + } + +} diff --git a/src/test/groovy/readme/InstrumentationExamples.java b/src/test/groovy/readme/InstrumentationExamples.java index f404ca4ee5..60ad4dc4dc 100644 --- a/src/test/groovy/readme/InstrumentationExamples.java +++ b/src/test/groovy/readme/InstrumentationExamples.java @@ -64,13 +64,14 @@ void recordTiming(String key, long time) { } class CustomInstrumentation extends SimplePerformantInstrumentation { + @Override - public @Nullable InstrumentationState createState(InstrumentationCreateStateParameters parameters) { + public @Nullable CompletableFuture createStateAsync(InstrumentationCreateStateParameters parameters) { // // instrumentation state is passed during each invocation of an Instrumentation method // and allows you to put stateful data away 
and reference it during the query execution // - return new CustomInstrumentationState(); + return CompletableFuture.completedFuture(new CustomInstrumentationState()); } @Override diff --git a/src/test/groovy/readme/ScalarExamples.java b/src/test/groovy/readme/ScalarExamples.java index 5d76dda856..601d949d67 100644 --- a/src/test/groovy/readme/ScalarExamples.java +++ b/src/test/groovy/readme/ScalarExamples.java @@ -1,12 +1,16 @@ package readme; +import graphql.GraphQLContext; +import graphql.execution.CoercedVariables; import graphql.language.StringValue; +import graphql.language.Value; import graphql.schema.Coercing; import graphql.schema.CoercingParseLiteralException; import graphql.schema.CoercingParseValueException; import graphql.schema.CoercingSerializeException; import graphql.schema.GraphQLScalarType; +import java.util.Locale; import java.util.regex.Pattern; @SuppressWarnings("unused") @@ -19,17 +23,17 @@ public static class EmailScalar { .description("A custom scalar that handles emails") .coercing(new Coercing() { @Override - public Object serialize(Object dataFetcherResult) { + public Object serialize(Object dataFetcherResult, GraphQLContext graphQLContext, Locale locale) { return serializeEmail(dataFetcherResult); } @Override - public Object parseValue(Object input) { + public Object parseValue(Object input, GraphQLContext graphQLContext, Locale locale) { return parseEmailFromVariable(input); } @Override - public Object parseLiteral(Object input) { + public Object parseLiteral(Value input, CoercedVariables variables, GraphQLContext graphQLContext, Locale locale) { return parseEmailFromAstLiteral(input); } }) @@ -73,5 +77,4 @@ private static Object parseEmailFromAstLiteral(Object input) { } } - } diff --git a/src/test/java/benchmark/AddError.java b/src/test/java/benchmark/AddError.java deleted file mode 100644 index 950b756ab3..0000000000 --- a/src/test/java/benchmark/AddError.java +++ /dev/null @@ -1,38 +0,0 @@ -package benchmark; - -import 
graphql.execution.ExecutionContext; -import graphql.execution.ExecutionContextBuilder; -import graphql.execution.ExecutionId; -import graphql.execution.ResultPath; -import graphql.schema.idl.errors.SchemaMissingError; -import org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.Scope; -import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Warmup; - -import java.util.Collections; - -@State(Scope.Benchmark) -public class AddError { - - private final ExecutionContext context = new ExecutionContextBuilder() - .executionId(ExecutionId.generate()) - .build(); - - private volatile int x = 0; - - @Benchmark - @BenchmarkMode(Mode.SingleShotTime) - @Warmup(iterations = 1, batchSize = 50000) - @Measurement(iterations = 1, batchSize = 5000) - public ExecutionContext benchMarkAddError() { - context.addError( - new SchemaMissingError(), - ResultPath.fromList(Collections.singletonList(x++)) - ); - return context; - } -} diff --git a/src/test/java/benchmark/AssertBenchmark.java b/src/test/java/benchmark/AssertBenchmark.java new file mode 100644 index 0000000000..04a11c03b2 --- /dev/null +++ b/src/test/java/benchmark/AssertBenchmark.java @@ -0,0 +1,90 @@ +package benchmark; + +import graphql.Assert; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.util.Random; +import java.util.concurrent.TimeUnit; + +@Warmup(iterations = 
2, time = 5, batchSize = 50) +@Measurement(iterations = 3, batchSize = 50) +@Fork(3) +public class AssertBenchmark { + + private static final int LOOPS = 100; + private static final boolean BOOL = new Random().nextBoolean(); + + @Benchmark + @BenchmarkMode(Mode.Throughput) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public void benchMarkAssertWithString() { + for (int i = 0; i < LOOPS; i++) { + Assert.assertTrue(jitTrue(), "This string is constant"); + } + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public void benchMarkAssertWithStringSupplier() { + for (int i = 0; i < LOOPS; i++) { + Assert.assertTrue(jitTrue(), () -> "This string is constant"); + } + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public void benchMarkAssertWithStringSupplierFormatted() { + for (int i = 0; i < LOOPS; i++) { + final int captured = i; + Assert.assertTrue(jitTrue(), () -> String.format("This string is not constant %d", captured)); + } + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public void benchMarkAssertWithStringFormatted() { + for (int i = 0; i < LOOPS; i++) { + Assert.assertTrue(jitTrue(), "This string is not constant %d", i); + } + } + + private boolean jitTrue() { + // can you jit this away, Mr JIT?? + //noinspection ConstantValue,SimplifiableConditionalExpression + return BOOL ? 
BOOL : !BOOL; + } + + public static void main(String[] args) throws RunnerException { + runAtStartup(); + Options opt = new OptionsBuilder() + .include("benchmark.AssertBenchmark") + .build(); + + new Runner(opt).run(); + } + + private static void runAtStartup() { + AssertBenchmark benchMark = new AssertBenchmark(); + BenchmarkUtils.runInToolingForSomeTimeThenExit( + () -> { + }, + benchMark::benchMarkAssertWithStringSupplier, + () -> { + } + + ); + } +} diff --git a/src/test/java/benchmark/AstPrinterBenchmark.java b/src/test/java/benchmark/AstPrinterBenchmark.java index 4f4a63d443..fd7f264523 100644 --- a/src/test/java/benchmark/AstPrinterBenchmark.java +++ b/src/test/java/benchmark/AstPrinterBenchmark.java @@ -5,6 +5,7 @@ import graphql.parser.Parser; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; @@ -13,16 +14,9 @@ import java.util.concurrent.TimeUnit; -/** - * See https://github.com/openjdk/jmh/tree/master/jmh-samples/src/main/java/org/openjdk/jmh/samples/ for more samples - * on what you can do with JMH - *

- * You MUST have the JMH plugin for IDEA in place for this to work : https://github.com/artyushov/idea-jmh-plugin - *

- * Install it and then just hit "Run" on a certain benchmark method - */ -@Warmup(iterations = 2, time = 5, batchSize = 3) -@Measurement(iterations = 3, time = 10, batchSize = 4) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3, time = 10) +@Fork(3) public class AstPrinterBenchmark { /** * Note: this query is a redacted version of a real query diff --git a/src/test/java/benchmark/AsyncBenchmark.java b/src/test/java/benchmark/AsyncBenchmark.java new file mode 100644 index 0000000000..a2fa43addd --- /dev/null +++ b/src/test/java/benchmark/AsyncBenchmark.java @@ -0,0 +1,68 @@ +package benchmark; + +import graphql.execution.Async; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +@State(Scope.Benchmark) +@BenchmarkMode(Mode.Throughput) +@Warmup(iterations = 2) +@Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) +@Fork(2) +public class AsyncBenchmark { + + @Param({"1", "5", "20"}) + public int numberOfFieldCFs; + + List> futures; + + @Setup(Level.Trial) + public void setUp() throws ExecutionException, InterruptedException { + futures = new ArrayList<>(); + for (int i = 0; i < numberOfFieldCFs; i++) { + futures.add(mkFuture(i)); + } + + } + + private CompletableFuture mkFuture(int i) 
{ + return CompletableFuture.completedFuture(i); + } + + + @Benchmark + @Warmup(iterations = 2, batchSize = 100) + @Measurement(iterations = 2, batchSize = 100) + public List benchmarkAsync() { + Async.CombinedBuilder builder = Async.ofExpectedSize(futures.size()); + futures.forEach(builder::add); + return builder.await().join(); + } + + public static void main(String[] args) throws Exception { + Options opt = new OptionsBuilder() + .include("benchmark.AsyncBenchmark") + .build(); + + new Runner(opt).run(); + } + +} diff --git a/src/test/java/benchmark/BenchmarkUtils.java b/src/test/java/benchmark/BenchmarkUtils.java index fd7897e125..c8dda196d0 100644 --- a/src/test/java/benchmark/BenchmarkUtils.java +++ b/src/test/java/benchmark/BenchmarkUtils.java @@ -1,11 +1,13 @@ package benchmark; -import com.google.common.io.Files; -import graphql.Assert; - -import java.io.File; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.Charset; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; import java.util.concurrent.Callable; public class BenchmarkUtils { @@ -17,7 +19,11 @@ static String loadResource(String name) { if (resource == null) { throw new IllegalArgumentException("missing resource: " + name); } - return String.join("\n", Files.readLines(new File(resource.toURI()), Charset.defaultCharset())); + byte[] bytes; + try (InputStream inputStream = resource.openStream()) { + bytes = inputStream.readAllBytes(); + } + return new String(bytes, Charset.defaultCharset()); }); } @@ -29,4 +35,50 @@ static T asRTE(Callable callable) { } } + public static void runInToolingForSomeTimeThenExit(Runnable setup, Runnable r, Runnable tearDown) { + int runForMillis = getRunForMillis(); + if (runForMillis <= 0) { + System.out.print("'runForMillis' environment var is not set - continuing \n"); + return; + } + System.out.printf("Running initial code 
in some tooling - runForMillis=%d \n", runForMillis); + System.out.print("Get your tooling in order and press enter..."); + readLine(); + System.out.print("Lets go...\n"); + setup.run(); + + DateTimeFormatter dtf = DateTimeFormatter.ofPattern("HH:mm:ss"); + long now, then = System.currentTimeMillis(); + do { + now = System.currentTimeMillis(); + long msLeft = runForMillis - (now - then); + System.out.printf("\t%s Running in loop... %s ms left\n", dtf.format(LocalDateTime.now()), msLeft); + r.run(); + now = System.currentTimeMillis(); + } while ((now - then) < runForMillis); + + tearDown.run(); + + System.out.printf("This ran for %d millis. Exiting...\n", System.currentTimeMillis() - then); + System.exit(0); + } + + private static void readLine() { + BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); + try { + br.readLine(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static int getRunForMillis() { + String runFor = System.getenv("runForMillis"); + try { + return Integer.parseInt(runFor); + } catch (NumberFormatException e) { + return -1; + } + } + } diff --git a/src/test/java/benchmark/ChainedInstrumentationBenchmark.java b/src/test/java/benchmark/ChainedInstrumentationBenchmark.java new file mode 100644 index 0000000000..674f424035 --- /dev/null +++ b/src/test/java/benchmark/ChainedInstrumentationBenchmark.java @@ -0,0 +1,85 @@ +package benchmark; + +import graphql.ExecutionInput; +import graphql.execution.instrumentation.ChainedInstrumentation; +import graphql.execution.instrumentation.Instrumentation; +import graphql.execution.instrumentation.InstrumentationState; +import graphql.execution.instrumentation.SimplePerformantInstrumentation; +import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters; +import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; +import graphql.schema.GraphQLObjectType; +import graphql.schema.GraphQLSchema; +import 
org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ExecutionException; + +import static graphql.Scalars.GraphQLString; +import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition; +import static graphql.schema.GraphQLObjectType.newObject; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) +public class ChainedInstrumentationBenchmark { + + @Param({"0", "1", "10"}) + public int num; + + ChainedInstrumentation chainedInstrumentation; + GraphQLSchema schema; + InstrumentationExecutionParameters parameters; + InstrumentationState instrumentationState; + + @Setup(Level.Trial) + public void setUp() throws ExecutionException, InterruptedException { + GraphQLObjectType queryType = newObject() + .name("benchmarkQuery") + .field(newFieldDefinition() + .type(GraphQLString) + .name("benchmark")) + .build(); + schema = GraphQLSchema.newSchema() + .query(queryType) + .build(); + + ExecutionInput executionInput = ExecutionInput.newExecutionInput().query("benchmark").build(); + InstrumentationCreateStateParameters createStateParameters = new InstrumentationCreateStateParameters(schema, executionInput); + + List instrumentations = Collections.nCopies(num, new SimplePerformantInstrumentation()); + chainedInstrumentation = new ChainedInstrumentation(instrumentations); + 
instrumentationState = chainedInstrumentation.createStateAsync(createStateParameters).get(); + parameters = new InstrumentationExecutionParameters(executionInput, schema); + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + public GraphQLSchema benchmarkInstrumentSchema() { + return chainedInstrumentation.instrumentSchema(schema, parameters, instrumentationState); + } + + public static void main(String[] args) throws Exception { + Options opt = new OptionsBuilder() + .include("benchmark.ChainedInstrumentationBenchmark") + .forks(1) + .build(); + + new Runner(opt).run(); + } + +} diff --git a/src/test/java/benchmark/CompletableFuturesBenchmark.java b/src/test/java/benchmark/CompletableFuturesBenchmark.java new file mode 100644 index 0000000000..e22bb1eb7f --- /dev/null +++ b/src/test/java/benchmark/CompletableFuturesBenchmark.java @@ -0,0 +1,108 @@ +package benchmark; + +import com.google.common.collect.ImmutableList; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 1) +@Measurement(iterations = 3, time = 10, batchSize = 10) +@Fork(3) +public class CompletableFuturesBenchmark { + + + @Param({"2", "5"}) + public int depth; + public int howMany = 10; + + @Setup(Level.Trial) + public void setUp() { + } + + private List> 
mkCFObjects(int howMany, int depth) { + if (depth <= 0) { + return Collections.emptyList(); + } + ImmutableList.Builder> builder = ImmutableList.builder(); + for (int i = 0; i < howMany; i++) { + CompletableFuture cf = CompletableFuture.completedFuture(mkCFObjects(howMany, depth - 1)); + builder.add(cf); + } + return builder.build(); + } + + private List mkObjects(int howMany, int depth) { + if (depth <= 0) { + return Collections.emptyList(); + } + ImmutableList.Builder builder = ImmutableList.builder(); + for (int i = 0; i < howMany; i++) { + Object obj = mkObjects(howMany, depth - 1); + builder.add(obj); + } + return builder.build(); + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + public void benchmarkCFApproach() { + // make results + List> completableFutures = mkCFObjects(howMany, depth); + // traverse results + traverseCFS(completableFutures); + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + public void benchmarkMaterializedApproach() { + // make results + List objects = mkObjects(howMany, depth); + // traverse results + traverseObjects(objects); + } + + @SuppressWarnings("unchecked") + private void traverseCFS(List> completableFutures) { + for (CompletableFuture completableFuture : completableFutures) { + // and when it's done - visit its child results - which are always immediate on completed CFs + // so this whenComplete executed now + completableFuture.whenComplete((list, t) -> { + List> cfs = (List>) list; + traverseCFS(cfs); + }); + } + } + + @SuppressWarnings("unchecked") + private void traverseObjects(List objects) { + for (Object object : objects) { + List list = (List) object; + traverseObjects(list); + } + } + + public static void main(String[] args) throws Exception { + Options opt = new OptionsBuilder() + .include("benchmark.CompletableFuturesBenchmark") + .build(); + + new Runner(opt).run(); + } + +} diff --git a/src/test/java/benchmark/ComplexQueryBenchmark.java b/src/test/java/benchmark/ComplexQueryBenchmark.java new file mode 
100644 index 0000000000..530bf7b4aa --- /dev/null +++ b/src/test/java/benchmark/ComplexQueryBenchmark.java @@ -0,0 +1,275 @@ +package benchmark; + +import com.google.common.collect.ImmutableList; +import graphql.ExecutionInput; +import graphql.ExecutionResult; +import graphql.GraphQL; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import graphql.schema.GraphQLSchema; +import graphql.schema.idl.RuntimeWiring; +import graphql.schema.idl.SchemaGenerator; +import graphql.schema.idl.SchemaParser; +import graphql.schema.idl.TypeDefinitionRegistry; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.profile.GCProfiler; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring; + +/** + * This benchmark is an attempt to have a more complex query that involves async and sync work together + * along with multiple threads happening. + *

+ * It can also be run in a forever mode say if you want to connect a profiler to it say + */ +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 2) +@Fork(2) +public class ComplexQueryBenchmark { + + @Param({"5", "10", "20"}) + int howManyItems = 5; + int howLongToSleep = 5; + int howManyQueries = 10; + int howManyQueryThreads = 10; + int howManyFetcherThreads = 10; + + ExecutorService queryExecutorService; + ExecutorService fetchersExecutorService; + GraphQL graphQL; + volatile boolean shutDown; + + @Setup(Level.Trial) + public void setUp() { + shutDown = false; + queryExecutorService = Executors.newFixedThreadPool(howManyQueryThreads); + fetchersExecutorService = Executors.newFixedThreadPool(howManyFetcherThreads); + graphQL = buildGraphQL(); + } + + @TearDown(Level.Trial) + public void tearDown() { + shutDown = true; + queryExecutorService.shutdownNow(); + fetchersExecutorService.shutdownNow(); + } + + + @Benchmark + @BenchmarkMode(Mode.Throughput) + @OutputTimeUnit(TimeUnit.SECONDS) + public Object benchMarkSimpleQueriesThroughput() { + return runManyQueriesToCompletion(); + } + + + public static void main(String[] args) throws Exception { + // just to make sure it's all valid before testing + runAtStartup(); + + Options opt = new OptionsBuilder() + .include("benchmark.ComplexQueryBenchmark") + .addProfiler(GCProfiler.class) + .build(); + + new Runner(opt).run(); + } + + @SuppressWarnings({"ConstantValue", "LoopConditionNotUpdatedInsideLoop"}) + private static void runAtStartup() { + + ComplexQueryBenchmark complexQueryBenchmark = new ComplexQueryBenchmark(); + complexQueryBenchmark.howManyQueries = 5; + complexQueryBenchmark.howManyItems = 10; + + BenchmarkUtils.runInToolingForSomeTimeThenExit( + complexQueryBenchmark::setUp, + complexQueryBenchmark::runManyQueriesToCompletion, + complexQueryBenchmark::tearDown + + ); + } + + + + @SuppressWarnings("UnnecessaryLocalVariable") + private Void runManyQueriesToCompletion() { + 
CompletableFuture[] cfs = new CompletableFuture[howManyQueries]; + for (int i = 0; i < howManyQueries; i++) { + cfs[i] = CompletableFuture.supplyAsync(() -> executeQuery(howManyItems, howLongToSleep), queryExecutorService).thenCompose(cf -> cf); + } + Void result = CompletableFuture.allOf(cfs).join(); + return result; + } + + public CompletableFuture executeQuery(int howMany, int howLong) { + String fields = "id name f1 f2 f3 f4 f5 f6 f7 f8 f9 f10"; + String query = "query q {" + + String.format("shops(howMany : %d) { %s departments( howMany : %d) { %s products(howMany : %d) { %s }}}\n" + , howMany, fields, 10, fields, 5, fields) + + String.format("expensiveShops(howMany : %d howLong : %d) { %s expensiveDepartments( howMany : %d howLong : %d) { %s expensiveProducts(howMany : %d howLong : %d) { %s }}}\n" + , howMany, howLong, fields, 10, howLong, fields, 5, howLong, fields) + + "}"; + return graphQL.executeAsync(ExecutionInput.newExecutionInput(query).build()); + } + + private GraphQL buildGraphQL() { + TypeDefinitionRegistry definitionRegistry = new SchemaParser().parse(BenchmarkUtils.loadResource("storesanddepartments.graphqls")); + + DataFetcher shopsDF = env -> mkHowManyThings(env.getArgument("howMany")); + DataFetcher expensiveShopsDF = env -> supplyAsync(() -> sleepAndReturnThings(env)); + DataFetcher departmentsDF = env -> mkHowManyThings(env.getArgument("howMany")); + DataFetcher expensiveDepartmentsDF = env -> supplyAsyncListItems(env, () -> sleepAndReturnThings(env)); + DataFetcher productsDF = env -> mkHowManyThings(env.getArgument("howMany")); + DataFetcher expensiveProductsDF = env -> supplyAsyncListItems(env, () -> sleepAndReturnThings(env)); + + RuntimeWiring runtimeWiring = RuntimeWiring.newRuntimeWiring() + .type(newTypeWiring("Query") + .dataFetcher("shops", shopsDF) + .dataFetcher("expensiveShops", expensiveShopsDF)) + .type(newTypeWiring("Shop") + .dataFetcher("departments", departmentsDF) + .dataFetcher("expensiveDepartments", 
expensiveDepartmentsDF)) + .type(newTypeWiring("Department") + .dataFetcher("products", productsDF) + .dataFetcher("expensiveProducts", expensiveProductsDF)) + .build(); + + GraphQLSchema graphQLSchema = new SchemaGenerator().makeExecutableSchema(definitionRegistry, runtimeWiring); + + return GraphQL.newGraphQL(graphQLSchema).build(); + } + + private CompletableFuture supplyAsyncListItems(DataFetchingEnvironment environment, Supplier codeToRun) { + return supplyAsync(codeToRun); + } + + private CompletableFuture supplyAsync(Supplier codeToRun) { + if (!shutDown) { + //logEvery(100, "async fetcher"); + return CompletableFuture.supplyAsync(codeToRun, fetchersExecutorService); + } else { + // if we have shutdown - get on with it, so we shut down quicker + return CompletableFuture.completedFuture(codeToRun.get()); + } + } + + private List sleepAndReturnThings(DataFetchingEnvironment env) { + // by sleeping, we hope to cause the objects to stay longer in GC land and hence have a longer lifecycle + // then a simple stack say or young gen gc. 
I don't know this will work, but I am trying it + // to represent work that takes some tie to complete + sleep(env.getArgument("howLong")); + return mkHowManyThings(env.getArgument("howMany")); + } + + private void sleep(Integer howLong) { + if (howLong > 0) { + try { + Thread.sleep(howLong); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + + AtomicInteger logCount = new AtomicInteger(); + + private void logEvery(int every, String s) { + int count = logCount.getAndIncrement(); + if (count == 0 || count % every == 0) { + System.out.println("\t" + count + "\t" + s); + } + } + + private List mkHowManyThings(Integer howMany) { + ImmutableList.Builder builder = ImmutableList.builder(); + for (int i = 0; i < howMany; i++) { + builder.add(new IdAndNamedThing(i)); + } + return builder.build(); + } + + @SuppressWarnings("unused") + static class IdAndNamedThing { + private final int i; + + public IdAndNamedThing(int i) { + this.i = i; + } + + public String getId() { + return "id" + i; + } + + public String getName() { + return "name" + i; + } + + public String getF1() { + return "f1" + i; + } + + public String getF2() { + return "f2" + i; + } + + public String getF3() { + return "f3" + i; + } + + public String getF4() { + return "f4" + i; + } + + public String getF5() { + return "f5" + i; + } + + public String getF6() { + return "f6" + i; + } + + public String getF7() { + return "f7" + i; + } + + public String getF8() { + return "f8" + i; + } + + public String getF9() { + return "f9" + i; + } + + public String getF10() { + return "f10" + i; + } + } +} diff --git a/src/test/java/benchmark/SchemaBenchMark.java b/src/test/java/benchmark/CreateSchemaBenchmark.java similarity index 67% rename from src/test/java/benchmark/SchemaBenchMark.java rename to src/test/java/benchmark/CreateSchemaBenchmark.java index 23d35427e8..0b5e67b41c 100644 --- a/src/test/java/benchmark/SchemaBenchMark.java +++ b/src/test/java/benchmark/CreateSchemaBenchmark.java @@ 
-1,6 +1,5 @@ package benchmark; -import com.google.common.io.Files; import graphql.schema.GraphQLSchema; import graphql.schema.idl.RuntimeWiring; import graphql.schema.idl.SchemaGenerator; @@ -8,44 +7,33 @@ import graphql.schema.idl.TypeDefinitionRegistry; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; -import java.io.File; -import java.net.URL; -import java.nio.charset.Charset; import java.util.concurrent.TimeUnit; -/** - * This benchmarks schema creation - *

- * See https://github.com/openjdk/jmh/tree/master/jmh-samples/src/main/java/org/openjdk/jmh/samples/ for more samples - * on what you can do with JMH - *

- * You MUST have the JMH plugin for IDEA in place for this to work : https://github.com/artyushov/idea-jmh-plugin - *

- * Install it and then just hit "Run" on a certain benchmark method - */ -@Warmup(iterations = 2, time = 5, batchSize = 3) -@Measurement(iterations = 3, time = 10, batchSize = 4) -public class SchemaBenchMark { +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) +public class CreateSchemaBenchmark { static String largeSDL = BenchmarkUtils.loadResource("large-schema-3.graphqls"); @Benchmark @BenchmarkMode(Mode.Throughput) @OutputTimeUnit(TimeUnit.MINUTES) - public void benchMarkLargeSchemaCreate(Blackhole blackhole) { + public void benchmarkLargeSchemaCreate(Blackhole blackhole) { blackhole.consume(createSchema(largeSDL)); } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MILLISECONDS) - public void benchMarkLargeSchemaCreateAvgTime(Blackhole blackhole) { + public void benchmarkLargeSchemaCreateAvgTime(Blackhole blackhole) { blackhole.consume(createSchema(largeSDL)); } diff --git a/src/test/java/benchmark/DFSelectionSetBenchmark.java b/src/test/java/benchmark/DFSelectionSetBenchmark.java index 2687a444a0..01081cf51c 100644 --- a/src/test/java/benchmark/DFSelectionSetBenchmark.java +++ b/src/test/java/benchmark/DFSelectionSetBenchmark.java @@ -21,7 +21,6 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; @@ -29,9 +28,9 @@ import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) -@BenchmarkMode(Mode.Throughput) -@Warmup(iterations = 2) -@Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) public class DFSelectionSetBenchmark { @State(Scope.Benchmark) @@ -65,10 +64,6 @@ public void setup() { } @Benchmark - @Warmup(iterations = 2) - @Measurement(iterations = 5, time = 10) - @Threads(1) - @Fork(3) @BenchmarkMode(Mode.AverageTime) 
@OutputTimeUnit(TimeUnit.MILLISECONDS) public void benchMarkAvgTime(MyState myState, Blackhole blackhole) { @@ -77,10 +72,6 @@ public void benchMarkAvgTime(MyState myState, Blackhole blackhole) { } @Benchmark - @Warmup(iterations = 2) - @Measurement(iterations = 5, time = 10) - @Threads(1) - @Fork(3) @BenchmarkMode(Mode.Throughput) @OutputTimeUnit(TimeUnit.MILLISECONDS) public void benchMarkThroughput(MyState myState, Blackhole blackhole) { diff --git a/src/test/java/benchmark/NQBenchmark1.java b/src/test/java/benchmark/ENFBenchmark1.java similarity index 84% rename from src/test/java/benchmark/NQBenchmark1.java rename to src/test/java/benchmark/ENFBenchmark1.java index d47d7b7a4c..6c10de1b19 100644 --- a/src/test/java/benchmark/NQBenchmark1.java +++ b/src/test/java/benchmark/ENFBenchmark1.java @@ -16,17 +16,16 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) -@BenchmarkMode(Mode.Throughput) -@Warmup(iterations = 2) -@Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) -public class NQBenchmark1 { +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) +public class ENFBenchmark1 { @State(Scope.Benchmark) public static class MyState { @@ -49,24 +48,16 @@ public void setup() { } @Benchmark - @Warmup(iterations = 2) - @Measurement(iterations = 3, time = 10) - @Threads(1) - @Fork(3) @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MILLISECONDS) - public void benchMarkAvgTime(MyState myState, Blackhole blackhole ) { + public void benchMarkAvgTime(MyState myState, Blackhole blackhole) { runImpl(myState, blackhole); } @Benchmark - @Warmup(iterations = 2) - @Measurement(iterations = 3, time = 10) - @Threads(1) - @Fork(3) @BenchmarkMode(Mode.Throughput) 
@OutputTimeUnit(TimeUnit.SECONDS) - public void benchMarkThroughput(MyState myState, Blackhole blackhole ) { + public void benchMarkThroughput(MyState myState, Blackhole blackhole) { runImpl(myState, blackhole); } diff --git a/src/test/java/benchmark/ENFBenchmark2.java b/src/test/java/benchmark/ENFBenchmark2.java new file mode 100644 index 0000000000..a2a0ad3648 --- /dev/null +++ b/src/test/java/benchmark/ENFBenchmark2.java @@ -0,0 +1,59 @@ +package benchmark; + +import graphql.execution.CoercedVariables; +import graphql.language.Document; +import graphql.normalized.ExecutableNormalizedOperation; +import graphql.normalized.ExecutableNormalizedOperationFactory; +import graphql.parser.Parser; +import graphql.schema.GraphQLSchema; +import graphql.schema.idl.SchemaGenerator; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.util.concurrent.TimeUnit; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) +public class ENFBenchmark2 { + + @State(Scope.Benchmark) + public static class MyState { + + GraphQLSchema schema; + Document document; + + @Setup + public void setup() { + try { + String schemaString = BenchmarkUtils.loadResource("large-schema-2.graphqls"); + schema = SchemaGenerator.createdMockedSchema(schemaString); + + String query = BenchmarkUtils.loadResource("large-schema-2-query.graphql"); + document = Parser.parse(query); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + } + + @Benchmark + @BenchmarkMode(Mode.AverageTime) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public 
ExecutableNormalizedOperation benchMarkAvgTime(MyState myState) { + ExecutableNormalizedOperation executableNormalizedOperation = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(myState.schema, myState.document, null, CoercedVariables.emptyVariables()); +// System.out.println("fields size:" + normalizedQuery.getFieldToNormalizedField().size()); + return executableNormalizedOperation; + } + +} diff --git a/src/test/java/benchmark/ENFBenchmarkDeepIntrospection.java b/src/test/java/benchmark/ENFBenchmarkDeepIntrospection.java new file mode 100644 index 0000000000..0ed09d4675 --- /dev/null +++ b/src/test/java/benchmark/ENFBenchmarkDeepIntrospection.java @@ -0,0 +1,122 @@ +package benchmark; + +import graphql.execution.CoercedVariables; +import graphql.language.Document; +import graphql.normalized.ExecutableNormalizedOperation; +import graphql.normalized.ExecutableNormalizedOperationFactory; +import graphql.parser.Parser; +import graphql.schema.GraphQLSchema; +import graphql.schema.idl.SchemaGenerator; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.util.concurrent.TimeUnit; + +import static graphql.normalized.ExecutableNormalizedOperationFactory.*; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3, time = 5) +@Fork(2) 
+public class ENFBenchmarkDeepIntrospection { + + @Param({"2", "10", "20"}) + int howDeep = 2; + + String query = ""; + + GraphQLSchema schema; + Document document; + + @Setup(Level.Trial) + public void setUp() { + String schemaString = BenchmarkUtils.loadResource("large-schema-2.graphqls"); + schema = SchemaGenerator.createdMockedSchema(schemaString); + + query = createDeepQuery(howDeep); + document = Parser.parse(query); + } + @Benchmark + @BenchmarkMode(Mode.AverageTime) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public ExecutableNormalizedOperation benchMarkAvgTime() { + ExecutableNormalizedOperationFactory.Options options = ExecutableNormalizedOperationFactory.Options.defaultOptions(); + ExecutableNormalizedOperation executableNormalizedOperation = createExecutableNormalizedOperation(schema, + document, + null, + CoercedVariables.emptyVariables(), + options); + return executableNormalizedOperation; + } + + public static void main(String[] args) throws RunnerException { + runAtStartup(); + + Options opt = new OptionsBuilder() + .include("benchmark.ENFBenchmarkDeepIntrospection") + .build(); + + new Runner(opt).run(); + } + + private static void runAtStartup() { + + ENFBenchmarkDeepIntrospection benchmarkIntrospection = new ENFBenchmarkDeepIntrospection(); + benchmarkIntrospection.howDeep = 2; + + BenchmarkUtils.runInToolingForSomeTimeThenExit( + benchmarkIntrospection::setUp, + () -> { while (true) { benchmarkIntrospection.benchMarkAvgTime(); }}, + () ->{} + ); + } + + + + private static String createDeepQuery(int depth) { + String result = "query test {\n" + + " __schema {\n" + + " types {\n" + + " ...F1\n" + + " }\n" + + " }\n" + + "}\n"; + + for (int i = 1; i < depth; i++) { + result += " fragment F" + i + " on __Type {\n" + + " fields {\n" + + " type {\n" + + " ...F" + (i + 1) +"\n" + + " }\n" + + " }\n" + + "\n" + + " ofType {\n" + + " ...F"+ (i + 1) + "\n" + + " }\n" + + " }\n"; + } + result += " fragment F" + depth + " on __Type {\n" + + " fields {\n" + 
+ " type {\n" + + "name\n" + + " }\n" + + " }\n" + + "}\n"; + return result; + } + +} diff --git a/src/test/java/benchmark/NQExtraLargeBenchmark.java b/src/test/java/benchmark/ENFExtraLargeBenchmark.java similarity index 84% rename from src/test/java/benchmark/NQExtraLargeBenchmark.java rename to src/test/java/benchmark/ENFExtraLargeBenchmark.java index fb92dbda9d..19a410a44d 100644 --- a/src/test/java/benchmark/NQExtraLargeBenchmark.java +++ b/src/test/java/benchmark/ENFExtraLargeBenchmark.java @@ -16,17 +16,16 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) -@BenchmarkMode(Mode.Throughput) -@Warmup(iterations = 2) -@Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) -public class NQExtraLargeBenchmark { +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) +public class ENFExtraLargeBenchmark { @State(Scope.Benchmark) public static class MyState { @@ -49,24 +48,16 @@ public void setup() { } @Benchmark - @Warmup(iterations = 2) - @Measurement(iterations = 3, time = 10) - @Threads(1) - @Fork(3) @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MILLISECONDS) - public void benchMarkAvgTime(MyState myState, Blackhole blackhole ) { + public void benchMarkAvgTime(MyState myState, Blackhole blackhole) { runImpl(myState, blackhole); } @Benchmark - @Warmup(iterations = 2) - @Measurement(iterations = 3, time = 10) - @Threads(1) - @Fork(3) @BenchmarkMode(Mode.Throughput) @OutputTimeUnit(TimeUnit.SECONDS) - public void benchMarkThroughput(MyState myState, Blackhole blackhole ) { + public void benchMarkThroughput(MyState myState, Blackhole blackhole) { runImpl(myState, blackhole); } diff --git a/src/test/java/benchmark/GetterAccessBenchmark.java 
b/src/test/java/benchmark/GetterAccessBenchmark.java index 7dd8c3d719..d7ff9b752c 100644 --- a/src/test/java/benchmark/GetterAccessBenchmark.java +++ b/src/test/java/benchmark/GetterAccessBenchmark.java @@ -2,6 +2,7 @@ import graphql.schema.fetching.LambdaFetchingSupport; import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; @@ -10,8 +11,9 @@ import java.lang.reflect.Method; import java.util.function.Function; -@Warmup(iterations = 2, time = 2, batchSize = 3) -@Measurement(iterations = 3, time = 2, batchSize = 4) +@Warmup(iterations = 2, time = 5, batchSize = 500) +@Measurement(iterations = 3, batchSize = 500) +@Fork(3) public class GetterAccessBenchmark { public static class Pojo { @@ -69,3 +71,4 @@ public void measureReflectionAccess(Blackhole bh) { } } } + diff --git a/src/test/java/benchmark/IntMapBenchmark.java b/src/test/java/benchmark/IntMapBenchmark.java index a50b072908..2dd74732c0 100644 --- a/src/test/java/benchmark/IntMapBenchmark.java +++ b/src/test/java/benchmark/IntMapBenchmark.java @@ -2,9 +2,8 @@ import graphql.execution.instrumentation.dataloader.LevelMap; import org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; @@ -12,34 +11,33 @@ import java.util.LinkedHashMap; import java.util.Map; -import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) -@BenchmarkMode(Mode.Throughput) -@Warmup(iterations = 2) -@Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) public class IntMapBenchmark { - @Benchmark - public 
void benchmarkLinkedHashMap(Blackhole blackhole) { - Map result = new LinkedHashMap<>(); - for (int i = 0; i < 30; i++) { - int level = i % 10; - int count = i * 2; - result.put(level, result.getOrDefault(level, 0) + count); - blackhole.consume(result.get(level)); - } - } + @Benchmark + public void benchmarkLinkedHashMap(Blackhole blackhole) { + Map result = new LinkedHashMap<>(); + for (int i = 0; i < 30; i++) { + int level = i % 10; + int count = i * 2; + result.put(level, result.getOrDefault(level, 0) + count); + blackhole.consume(result.get(level)); + } + } - @Benchmark - public void benchmarkIntMap(Blackhole blackhole) { - LevelMap result = new LevelMap(16); - for (int i = 0; i < 30; i++) { - int level = i % 10; - int count = i * 2; - result.increment(level, count); - blackhole.consume(result.get(level)); - } - } + @Benchmark + public void benchmarkIntMap(Blackhole blackhole) { + LevelMap result = new LevelMap(16); + for (int i = 0; i < 30; i++) { + int level = i % 10; + int count = i * 2; + result.increment(level, count); + blackhole.consume(result.get(level)); + } + } } diff --git a/src/test/java/benchmark/IntrospectionBenchmark.java b/src/test/java/benchmark/IntrospectionBenchmark.java index 6745d07d62..d226d232de 100644 --- a/src/test/java/benchmark/IntrospectionBenchmark.java +++ b/src/test/java/benchmark/IntrospectionBenchmark.java @@ -2,124 +2,56 @@ import graphql.ExecutionResult; import graphql.GraphQL; -import graphql.execution.DataFetcherResult; -import graphql.execution.instrumentation.InstrumentationState; -import graphql.execution.instrumentation.SimplePerformantInstrumentation; -import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters; import graphql.introspection.IntrospectionQuery; -import graphql.schema.DataFetcher; -import graphql.schema.DataFetchingEnvironment; -import graphql.schema.GraphQLNamedType; import graphql.schema.GraphQLSchema; import graphql.schema.idl.SchemaGenerator; -import 
org.jetbrains.annotations.NotNull; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; @State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) public class IntrospectionBenchmark { - private final GraphQL graphQL; - private final DFCountingInstrumentation countingInstrumentation = new DFCountingInstrumentation(); - - static class DFCountingInstrumentation extends SimplePerformantInstrumentation { - Map counts = new LinkedHashMap<>(); - Map times = new LinkedHashMap<>(); - - @Override - public @NotNull DataFetcher instrumentDataFetcher(DataFetcher dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) { - return (DataFetcher) env -> { - long then = System.nanoTime(); - Object value = dataFetcher.get(env); - long nanos = System.nanoTime() - then; - DataFetcherResult.Builder result = DataFetcherResult.newResult().data(value); - - String path = env.getExecutionStepInfo().getPath().toString(); - String prevTypePath = env.getLocalContext(); + @Benchmark + @BenchmarkMode(Mode.AverageTime) + public ExecutionResult benchMarkIntrospectionAvgTime() { + return graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY); + } - Object source = env.getSource(); - if (isSchemaTypesFetch(env, source)) { - String typeName = ((GraphQLNamedType) source).getName(); + @Benchmark + @BenchmarkMode(Mode.Throughput) + public 
ExecutionResult benchMarkIntrospectionThroughput() { + return graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY); + } - String prefix = "/__schema/types[" + typeName + "]"; - result.localContext(prefix); - prevTypePath = prefix; - } - if (prevTypePath != null) { - path = path.replaceAll("/__schema/types\\[.*\\]", prevTypePath); - } - counts.compute(path, (k, v) -> v == null ? 1 : v++); - if (nanos > 200_000) { - times.compute(path, (k, v) -> v == null ? nanos : v + nanos); - } - return result.build(); - }; - } + private final GraphQL graphQL; - private boolean isSchemaTypesFetch(DataFetchingEnvironment env, Object source) { - String parentPath = env.getExecutionStepInfo().getParent().getPath().getPathWithoutListEnd().toString(); - return "/__schema/types".equals(parentPath) && source instanceof GraphQLNamedType; - } - } public IntrospectionBenchmark() { String largeSchema = BenchmarkUtils.loadResource("large-schema-4.graphqls"); GraphQLSchema graphQLSchema = SchemaGenerator.createdMockedSchema(largeSchema); graphQL = GraphQL.newGraphQL(graphQLSchema) - //.instrumentation(countingInstrumentation) .build(); } - public static void main(String[] args) { - IntrospectionBenchmark introspectionBenchmark = new IntrospectionBenchmark(); -// while (true) { -// long then = System.currentTimeMillis(); -// ExecutionResult er = introspectionBenchmark.benchMarkIntrospection(); -// long ms = System.currentTimeMillis() - then; -// System.out.println("Took " + ms + "ms"); -// } - - introspectionBenchmark.benchMarkIntrospection(); - - Map counts = sortByValue(introspectionBenchmark.countingInstrumentation.counts); - Map times = sortByValue(introspectionBenchmark.countingInstrumentation.times); - - System.out.println("Counts"); - counts.forEach((k, v) -> System.out.printf("C %-70s : %020d\n", k, v)); - System.out.println("Times"); - times.forEach((k, v) -> System.out.printf("T %-70s : %020d\n", k, v)); - - - } - - public static > Map sortByValue(Map map) { - List> list = new 
ArrayList<>(map.entrySet()); - list.sort(Map.Entry.comparingByValue()); - - Map result = new LinkedHashMap<>(); - for (Map.Entry entry : list) { - result.put(entry.getKey(), entry.getValue()); - } - - return result; - } + public static void main(String[] args) throws RunnerException { + Options opt = new OptionsBuilder() + .include("benchmark.IntrospectionBenchmark") + .build(); - @Benchmark - @BenchmarkMode(Mode.AverageTime) - @Warmup(iterations = 2) - @Measurement(iterations = 3) - public ExecutionResult benchMarkIntrospection() { - return graphQL.execute(IntrospectionQuery.INTROSPECTION_QUERY); + new Runner(opt).run(); } } diff --git a/src/test/java/benchmark/ListBenchmark.java b/src/test/java/benchmark/ListBenchmark.java deleted file mode 100644 index 0609908762..0000000000 --- a/src/test/java/benchmark/ListBenchmark.java +++ /dev/null @@ -1,65 +0,0 @@ -package benchmark; - -import com.google.common.collect.ImmutableList; -import graphql.collect.ImmutableKit; -import org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.Scope; -import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Warmup; -import org.openjdk.jmh.infra.Blackhole; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.stream.Collectors; - -@State(Scope.Benchmark) -@BenchmarkMode(Mode.Throughput) -@Warmup(iterations = 2) -@Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) -public class ListBenchmark { - - static final List startingList = buildStartingList(); - - private static List buildStartingList() { - List list = new ArrayList<>(); - for (int i = 0; i < 10000; i++) { - list.add("String" + i); - } - return list; - } - - private final Function mapper = s -> new StringBuilder(s).reverse().toString(); - - 
@Benchmark - public void benchmarkListStream(Blackhole blackhole) { - List output = startingList.stream().map(mapper).collect(Collectors.toList()); - blackhole.consume(output); - } - - @Benchmark - public void benchmarkImmutableListBuilder(Blackhole blackhole) { - List output = ImmutableKit.map(startingList, mapper); - blackhole.consume(output); - } - - @Benchmark - public void benchmarkArrayList(Blackhole blackhole) { - List output = new ArrayList<>(startingList.size()); - for (String s : startingList) { - output.add(mapper.apply(s)); - } - blackhole.consume(output); - } - - @Benchmark - public void benchmarkImmutableCollectorBuilder(Blackhole blackhole) { - List output = startingList.stream().map(mapper).collect(ImmutableList.toImmutableList()); - blackhole.consume(output); - } - -} diff --git a/src/test/java/benchmark/MapBenchmark.java b/src/test/java/benchmark/MapBenchmark.java new file mode 100644 index 0000000000..04f05f73f8 --- /dev/null +++ b/src/test/java/benchmark/MapBenchmark.java @@ -0,0 +1,92 @@ +package benchmark; + +import com.google.common.collect.ImmutableMap; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Random; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 1) +@Measurement(iterations = 3, time = 1, batchSize = 1000) 
+@Fork(3) +public class MapBenchmark { + + @Param({"10", "50", "300"}) + int numberEntries = 300; + + Map hashMap; + Map linkedHashMap; + Map immutableMap; + + Random random; + + @Setup(Level.Trial) + public void setUp() { + random = new Random(); + linkedHashMap = new LinkedHashMap<>(); + for (int i = 0; i < numberEntries; i++) { + linkedHashMap.put("string" + i, i); + } + hashMap = new HashMap<>(); + for (int i = 0; i < numberEntries; i++) { + hashMap.put("string" + i, i); + } + ImmutableMap.Builder builder = ImmutableMap.builder(); + for (int i = 0; i < numberEntries; i++) { + builder.put("string" + i, i); + } + immutableMap = builder.build(); + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + public void benchmarkLinkedHashMap(Blackhole blackhole) { + mapGet(blackhole, linkedHashMap); + } + @Benchmark + @BenchmarkMode(Mode.Throughput) + public void benchmarkHashMap(Blackhole blackhole) { + mapGet(blackhole, hashMap); + } + + @Benchmark + @BenchmarkMode(Mode.Throughput) + public void benchmarkImmutableMap(Blackhole blackhole) { + mapGet(blackhole, immutableMap); + } + + private void mapGet(Blackhole blackhole, Map mapp) { + int index = rand(0, numberEntries); + blackhole.consume(mapp.get("string" + index)); + } + + private int rand(int loInc, int hiExc) { + return random.nextInt(hiExc - loInc) + loInc; + } + + public static void main(String[] args) throws Exception { + Options opt = new OptionsBuilder() + .include("benchmark.MapBenchmark") + .build(); + + new Runner(opt).run(); + } +} + diff --git a/src/test/java/benchmark/NQBenchmark2.java b/src/test/java/benchmark/NQBenchmark2.java deleted file mode 100644 index 68402b4ec7..0000000000 --- a/src/test/java/benchmark/NQBenchmark2.java +++ /dev/null @@ -1,106 +0,0 @@ -package benchmark; - -import com.google.common.collect.ImmutableListMultimap; -import graphql.execution.CoercedVariables; -import graphql.language.Document; -import graphql.language.Field; -import graphql.normalized.ExecutableNormalizedField; 
-import graphql.normalized.ExecutableNormalizedOperation; -import graphql.normalized.ExecutableNormalizedOperationFactory; -import graphql.parser.Parser; -import graphql.schema.GraphQLSchema; -import graphql.schema.idl.SchemaGenerator; -import graphql.util.TraversalControl; -import graphql.util.Traverser; -import graphql.util.TraverserContext; -import graphql.util.TraverserVisitorStub; -import org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; -import org.openjdk.jmh.annotations.Fork; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.OutputTimeUnit; -import org.openjdk.jmh.annotations.Scope; -import org.openjdk.jmh.annotations.Setup; -import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Threads; -import org.openjdk.jmh.annotations.Warmup; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - -@State(Scope.Benchmark) -@BenchmarkMode(Mode.Throughput) -@Warmup(iterations = 2) -@Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) -public class NQBenchmark2 { - - @State(Scope.Benchmark) - public static class MyState { - - GraphQLSchema schema; - Document document; - - @Setup - public void setup() { - try { - String schemaString = BenchmarkUtils.loadResource("large-schema-2.graphqls"); - schema = SchemaGenerator.createdMockedSchema(schemaString); - - String query = BenchmarkUtils.loadResource("large-schema-2-query.graphql"); - document = Parser.parse(query); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - } - - @Benchmark - @Warmup(iterations = 2) - @Measurement(iterations = 5, time = 10) - @Threads(1) - @Fork(3) - @BenchmarkMode(Mode.AverageTime) - @OutputTimeUnit(TimeUnit.MILLISECONDS) - public ExecutableNormalizedOperation benchMarkAvgTime(MyState myState) { - ExecutableNormalizedOperation executableNormalizedOperation = 
ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(myState.schema, myState.document, null, CoercedVariables.emptyVariables()); -// System.out.println("fields size:" + normalizedQuery.getFieldToNormalizedField().size()); - return executableNormalizedOperation; - } - - public static void main(String[] args) { - MyState myState = new MyState(); - myState.setup(); - ExecutableNormalizedOperation executableNormalizedOperation = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(myState.schema, myState.document, null, CoercedVariables.emptyVariables()); -// System.out.println(printTree(normalizedQuery)); - ImmutableListMultimap fieldToNormalizedField = executableNormalizedOperation.getFieldToNormalizedField(); - System.out.println(fieldToNormalizedField.size()); -// for (Field field : fieldToNormalizedField.keySet()) { -// System.out.println("field" + field); -// System.out.println("nf count:" + fieldToNormalizedField.get(field).size()); -// if (field.getName().equals("field49")) { -// ImmutableList normalizedFields = fieldToNormalizedField.get(field); -// for (NormalizedField nf : normalizedFields) { -// System.out.println(nf); -// } -// } -// } -// System.out.println("fields size:" + normalizedQuery.getFieldToNormalizedField().size()); - } - - static List printTree(ExecutableNormalizedOperation queryExecutionTree) { - List result = new ArrayList<>(); - Traverser traverser = Traverser.depthFirst(ExecutableNormalizedField::getChildren); - traverser.traverse(queryExecutionTree.getTopLevelFields(), new TraverserVisitorStub() { - @Override - public TraversalControl enter(TraverserContext context) { - ExecutableNormalizedField queryExecutionField = context.thisNode(); - result.add(queryExecutionField.printDetails()); - return TraversalControl.CONTINUE; - } - }); - return result; - } -} diff --git a/src/test/java/benchmark/OverlappingFieldValidationBenchmark.java b/src/test/java/benchmark/OverlappingFieldValidationBenchmark.java 
index d8cf18bb5f..7340f9342b 100644 --- a/src/test/java/benchmark/OverlappingFieldValidationBenchmark.java +++ b/src/test/java/benchmark/OverlappingFieldValidationBenchmark.java @@ -22,7 +22,6 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; @@ -34,12 +33,9 @@ import static graphql.Assert.assertTrue; @State(Scope.Benchmark) -@BenchmarkMode(Mode.AverageTime) -@Threads(1) @Warmup(iterations = 2, time = 5) -@Measurement(iterations = 3, time = 10) +@Measurement(iterations = 3) @Fork(3) -@OutputTimeUnit(TimeUnit.MILLISECONDS) public class OverlappingFieldValidationBenchmark { @State(Scope.Benchmark) @@ -67,12 +63,12 @@ public void setup() { } @Benchmark + @BenchmarkMode(Mode.AverageTime) public void overlappingFieldValidationAbgTime(MyState myState, Blackhole blackhole) { blackhole.consume(validateQuery(myState.schema, myState.document)); } @Benchmark - @BenchmarkMode(Mode.Throughput) @OutputTimeUnit(TimeUnit.SECONDS) public void overlappingFieldValidationThroughput(MyState myState, Blackhole blackhole) { blackhole.consume(validateQuery(myState.schema, myState.document)); diff --git a/src/test/java/benchmark/PropertyFetcherBenchMark.java b/src/test/java/benchmark/PropertyFetcherBenchMark.java index 036b4994b4..00146f8caf 100644 --- a/src/test/java/benchmark/PropertyFetcherBenchMark.java +++ b/src/test/java/benchmark/PropertyFetcherBenchMark.java @@ -5,6 +5,7 @@ import graphql.schema.PropertyDataFetcher; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; @@ -13,18 +14,9 @@ import java.util.concurrent.TimeUnit; -/** - * This benchmarks a simple 
property fetch to help improve the key class PropertyDataFetcher - *

- * See https://github.com/openjdk/jmh/tree/master/jmh-samples/src/main/java/org/openjdk/jmh/samples/ for more samples - * on what you can do with JMH - *

- * You MUST have the JMH plugin for IDEA in place for this to work : https://github.com/artyushov/idea-jmh-plugin - *

- * Install it and then just hit "Run" on a certain benchmark method - */ -@Warmup(iterations = 2, time = 5, batchSize = 3) -@Measurement(iterations = 3, time = 10, batchSize = 4) +@Warmup(iterations = 2, time = 5, batchSize = 50) +@Measurement(iterations = 3, batchSize = 50) +@Fork(3) public class PropertyFetcherBenchMark { @Benchmark diff --git a/src/test/java/benchmark/QueryExecutionOrientedBenchmarks.java b/src/test/java/benchmark/QueryExecutionOrientedBenchmarks.java new file mode 100644 index 0000000000..96166a0f5f --- /dev/null +++ b/src/test/java/benchmark/QueryExecutionOrientedBenchmarks.java @@ -0,0 +1,23 @@ +package benchmark; + +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +/** + * A runner of benchmarks that are whole query runners and they do + * so from the top of the stack all the way in + */ +public class QueryExecutionOrientedBenchmarks { + + public static void main(String[] args) throws RunnerException { + Options opt = new OptionsBuilder() + .include("benchmark.ComplexQueryBenchmark") + .include("benchmark.IntrospectionBenchmark") + .include("benchmark.TwitterBenchmark") + .build(); + + new Runner(opt).run(); + } +} diff --git a/src/test/java/benchmark/SchemaTransformerBenchmark.java b/src/test/java/benchmark/SchemaTransformerBenchmark.java index 96057aa534..669bda3f5e 100644 --- a/src/test/java/benchmark/SchemaTransformerBenchmark.java +++ b/src/test/java/benchmark/SchemaTransformerBenchmark.java @@ -1,7 +1,5 @@ package benchmark; -import com.google.common.base.Charsets; -import com.google.common.io.Resources; import graphql.schema.GraphQLDirective; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLObjectType; @@ -22,20 +20,14 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; -import 
org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; -import java.io.IOException; -import java.net.URL; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static com.google.common.io.Resources.getResource; - @State(Scope.Benchmark) @BenchmarkMode(Mode.AverageTime) -@Threads(1) @Warmup(iterations = 2, time = 5) @Measurement(iterations = 3, time = 10) @Fork(3) @@ -55,7 +47,7 @@ public static class MyState { @Override public TraversalControl visitGraphQLFieldDefinition(GraphQLFieldDefinition node, TraverserContext context) { // add directive - GraphQLFieldDefinition changedNode = node.transform( builder -> { + GraphQLFieldDefinition changedNode = node.transform(builder -> { builder.withDirective(infoDirective); }); return changeNode(context, changedNode); @@ -64,7 +56,7 @@ public TraversalControl visitGraphQLFieldDefinition(GraphQLFieldDefinition node, @Override public TraversalControl visitGraphQLObjectType(GraphQLObjectType node, TraverserContext context) { // add directive info - GraphQLObjectType changedNode = node.transform( builder -> { + GraphQLObjectType changedNode = node.transform(builder -> { builder.withDirective(infoDirective); }); return changeNode(context, changedNode); @@ -78,7 +70,7 @@ public TraversalControl visitGraphQLFieldDefinition(GraphQLFieldDefinition node, .filter(d -> !d.getName().equals(infoDirective.getName())) .collect(Collectors.toList()); // remove directive info - GraphQLFieldDefinition changedNode = node.transform( builder -> { + GraphQLFieldDefinition changedNode = node.transform(builder -> { builder.replaceDirectives(filteredDirectives); }); return changeNode(context, changedNode); @@ -90,7 +82,7 @@ public TraversalControl visitGraphQLObjectType(GraphQLObjectType node, Traverser .filter(d -> !d.getName().equals(infoDirective.getName())) .collect(Collectors.toList()); // remove directive info - GraphQLObjectType changedNode = node.transform( builder -> { 
+ GraphQLObjectType changedNode = node.transform(builder -> { builder.replaceDirectives(filteredDirectives); }); return changeNode(context, changedNode); diff --git a/src/test/java/benchmark/BenchMark.java b/src/test/java/benchmark/SimpleQueryBenchmark.java similarity index 86% rename from src/test/java/benchmark/BenchMark.java rename to src/test/java/benchmark/SimpleQueryBenchmark.java index 29875d7841..0cce5c0fb5 100644 --- a/src/test/java/benchmark/BenchMark.java +++ b/src/test/java/benchmark/SimpleQueryBenchmark.java @@ -14,6 +14,7 @@ import graphql.schema.idl.TypeDefinitionRegistry; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; @@ -25,17 +26,10 @@ import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring; -/** - * See this link for more samples - * on what you can do with JMH. - *

- * You MUST have the JMH plugin for IDEA in place for this to work : idea-jmh-plugin - *

- * Install it and then just hit "Run" on a certain benchmark method - */ -@Warmup(iterations = 2, time = 5, batchSize = 3) -@Measurement(iterations = 3, time = 10, batchSize = 4) -public class BenchMark { +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) +public class SimpleQueryBenchmark { private static final int NUMBER_OF_FRIENDS = 10 * 100; private static final GraphQL GRAPHQL = buildGraphQL(); diff --git a/src/test/java/benchmark/TwitterBenchmark.java b/src/test/java/benchmark/TwitterBenchmark.java index 4164f8a40b..9136fff7cb 100644 --- a/src/test/java/benchmark/TwitterBenchmark.java +++ b/src/test/java/benchmark/TwitterBenchmark.java @@ -19,9 +19,12 @@ import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; -import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; import java.util.ArrayList; import java.util.List; @@ -30,108 +33,116 @@ import static graphql.Scalars.GraphQLString; -@Warmup(iterations = 8, time = 10) -@Measurement(iterations = 25, time = 10) -@Fork(1) -@Threads(1) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) public class TwitterBenchmark { - private static final int BREADTH = 150; - private static final int DEPTH = 150; - - static String query = mkQuery(); - static Object queryId = "QUERY_ID"; - static GraphQL graphQL = buildGraphQL(); - - @Benchmark - @BenchmarkMode(Mode.Throughput) - @OutputTimeUnit(TimeUnit.SECONDS) - public void execute(Blackhole bh) { - bh.consume(execute()); - } - - private static ExecutionResult execute() { - return graphQL.execute(query); - } - - public static String mkQuery() { - StringBuilder sb = new StringBuilder(); - 
sb.append("{"); - for (int d=1; d <= DEPTH; d++) { - for (int b=1; b <= BREADTH; b++) { - sb.append("leaf_"); - sb.append(b); - sb.append(" "); - } - if (d < DEPTH) { - sb.append("branch { "); - } + private static final int BREADTH = 150; + private static final int DEPTH = 150; + + static String query = mkQuery(); + static Object queryId = "QUERY_ID"; + static GraphQL graphQL = buildGraphQL(); + + @Benchmark + @BenchmarkMode(Mode.Throughput) + @OutputTimeUnit(TimeUnit.SECONDS) + public void benchmarkThroughput(Blackhole bh) { + bh.consume(execute()); } - for (int d=1; d <= DEPTH; d++) { - sb.append("}"); + + @Benchmark + @BenchmarkMode(Mode.AverageTime) + public void benchmarkAvgTime(Blackhole bh) { + bh.consume(execute()); } - return sb.toString(); - } - - private static GraphQL buildGraphQL() { - ParserOptions.setDefaultOperationParserOptions(ParserOptions.newParserOptions().maxTokens(100_000).build()); - - List leafFields = new ArrayList<>(BREADTH); - for (int i = 1; i <= BREADTH; i++) { - leafFields.add( - GraphQLFieldDefinition.newFieldDefinition() - .name("leaf_" + i) - .type(GraphQLString) - .build() - ); + + private static ExecutionResult execute() { + return graphQL.execute(query); } - GraphQLObjectType branchType = GraphQLObjectType.newObject() - .name("Branch") - .fields(leafFields) - .field(GraphQLFieldDefinition.newFieldDefinition() - .name("branch") - .type(GraphQLTypeReference.typeRef("Branch"))) - .build(); - - - DataFetcher simpleFetcher = env -> env.getField().getName(); - GraphQLCodeRegistry codeReg = GraphQLCodeRegistry.newCodeRegistry() - .defaultDataFetcher( - environment -> simpleFetcher - ) - .build(); - - GraphQLSchema graphQLSchema = GraphQLSchema.newSchema() - .query(branchType) - .codeRegistry(codeReg) - .build(); - - return GraphQL - .newGraphQL(graphQLSchema) - .preparsedDocumentProvider( - new PersistedQuery( - InMemoryPersistedQueryCache - .newInMemoryPersistedQueryCache() - .addQuery(queryId, query) - .build() - ) - ) - .build(); - 
} - - static class PersistedQuery extends PersistedQuerySupport { - public PersistedQuery(PersistedQueryCache persistedQueryCache) { - super(persistedQueryCache); + public static String mkQuery() { + StringBuilder sb = new StringBuilder(); + sb.append("{"); + for (int d = 1; d <= DEPTH; d++) { + for (int b = 1; b <= BREADTH; b++) { + sb.append("leaf_"); + sb.append(b); + sb.append(" "); + } + if (d < DEPTH) { + sb.append("branch { "); + } + } + for (int d = 1; d <= DEPTH; d++) { + sb.append("}"); + } + return sb.toString(); } - @Override - protected Optional getPersistedQueryId(ExecutionInput executionInput) { - return Optional.of(queryId); + private static GraphQL buildGraphQL() { + ParserOptions.setDefaultOperationParserOptions(ParserOptions.newParserOptions().maxTokens(100_000).build()); + + List leafFields = new ArrayList<>(BREADTH); + for (int i = 1; i <= BREADTH; i++) { + leafFields.add( + GraphQLFieldDefinition.newFieldDefinition() + .name("leaf_" + i) + .type(GraphQLString) + .build() + ); + } + + GraphQLObjectType branchType = GraphQLObjectType.newObject() + .name("Branch") + .fields(leafFields) + .field(GraphQLFieldDefinition.newFieldDefinition() + .name("branch") + .type(GraphQLTypeReference.typeRef("Branch"))) + .build(); + + + DataFetcher simpleFetcher = env -> env.getField().getName(); + GraphQLCodeRegistry codeReg = GraphQLCodeRegistry.newCodeRegistry() + .defaultDataFetcher( + environment -> simpleFetcher + ) + .build(); + + GraphQLSchema graphQLSchema = GraphQLSchema.newSchema() + .query(branchType) + .codeRegistry(codeReg) + .build(); + + return GraphQL + .newGraphQL(graphQLSchema) + .preparsedDocumentProvider( + new PersistedQuery( + InMemoryPersistedQueryCache + .newInMemoryPersistedQueryCache() + .addQuery(queryId, query) + .build() + ) + ) + .build(); } - } - public static void main(String[] args) { - ExecutionResult result = execute(); - System.out.println(result); - } + static class PersistedQuery extends PersistedQuerySupport { + public 
PersistedQuery(PersistedQueryCache persistedQueryCache) { + super(persistedQueryCache); + } + + @Override + protected Optional getPersistedQueryId(ExecutionInput executionInput) { + return Optional.of(queryId); + } + } + + public static void main(String[] args) throws RunnerException { + Options opt = new OptionsBuilder() + .include("benchmark.TwitterBenchmark") + .build(); + + new Runner(opt).run(); + } } diff --git a/src/test/java/benchmark/TypeDefinitionParserVersusSerializeBenchMark.java b/src/test/java/benchmark/TypeDefinitionParserVersusSerializeBenchmark.java similarity index 76% rename from src/test/java/benchmark/TypeDefinitionParserVersusSerializeBenchMark.java rename to src/test/java/benchmark/TypeDefinitionParserVersusSerializeBenchmark.java index 931054bb72..995700d07f 100644 --- a/src/test/java/benchmark/TypeDefinitionParserVersusSerializeBenchMark.java +++ b/src/test/java/benchmark/TypeDefinitionParserVersusSerializeBenchmark.java @@ -4,6 +4,7 @@ import graphql.schema.idl.TypeDefinitionRegistry; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; @@ -18,19 +19,10 @@ import static benchmark.BenchmarkUtils.asRTE; -/** - * This benchmarks {@link graphql.schema.idl.TypeDefinitionRegistry} parsing and serialisation - *

- * See https://github.com/openjdk/jmh/tree/master/jmh-samples/src/main/java/org/openjdk/jmh/samples/ for more samples - * on what you can do with JMH - *

- * You MUST have the JMH plugin for IDEA in place for this to work : https://github.com/artyushov/idea-jmh-plugin - *

- * Install it and then just hit "Run" on a certain benchmark method - */ -@Warmup(iterations = 2, time = 5, batchSize = 3) -@Measurement(iterations = 3, time = 10, batchSize = 4) -public class TypeDefinitionParserVersusSerializeBenchMark { +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) +public class TypeDefinitionParserVersusSerializeBenchmark { static SchemaParser schemaParser = new SchemaParser(); static String SDL = BenchmarkUtils.loadResource("large-schema-2.graphqls"); diff --git a/src/test/java/benchmark/ValidatorBenchmark.java b/src/test/java/benchmark/ValidatorBenchmark.java index 9db2384f29..71dc0aa33c 100644 --- a/src/test/java/benchmark/ValidatorBenchmark.java +++ b/src/test/java/benchmark/ValidatorBenchmark.java @@ -1,8 +1,12 @@ package benchmark; -import java.util.Locale; -import java.util.concurrent.TimeUnit; - +import graphql.ExecutionResult; +import graphql.GraphQL; +import graphql.language.Document; +import graphql.parser.Parser; +import graphql.schema.GraphQLSchema; +import graphql.schema.idl.SchemaGenerator; +import graphql.validation.Validator; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -12,26 +16,19 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; -import graphql.ExecutionResult; -import graphql.GraphQL; -import graphql.language.Document; -import graphql.parser.Parser; -import graphql.schema.GraphQLSchema; -import graphql.schema.idl.SchemaGenerator; -import graphql.validation.Validator; +import java.util.Locale; +import java.util.concurrent.TimeUnit; import static graphql.Assert.assertTrue; @State(Scope.Benchmark) @BenchmarkMode(Mode.AverageTime) -@Threads(1) -@Warmup(iterations = 5, time = 5) -@Measurement(iterations = 10, time = 10) -@Fork(1) 
+@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(3) @OutputTimeUnit(TimeUnit.MILLISECONDS) public class ValidatorBenchmark { diff --git a/src/test/resources/storesanddepartments.graphqls b/src/test/resources/storesanddepartments.graphqls index 8a8defd3e8..57c83e3ab7 100644 --- a/src/test/resources/storesanddepartments.graphqls +++ b/src/test/resources/storesanddepartments.graphqls @@ -4,25 +4,55 @@ schema { } type Query { - shops: [Shop] - expensiveShops: [Shop] + shops(howMany : Int = 5): [Shop] + expensiveShops(howMany : Int = 5, howLong : Int = 0): [Shop] } type Shop { id: ID! name: String! - departments: [Department] - expensiveDepartments: [Department] + f1 : String + f2 : String + f3 : String + f4 : String + f5 : String + f6 : String + f7 : String + f8 : String + f9 : String + f10 : String + departments(howMany : Int = 5): [Department] + expensiveDepartments(howMany : Int = 5, howLong : Int = 0): [Department] } type Department { id: ID! name: String! - products: [Product] - expensiveProducts: [Product] + f1 : String + f2 : String + f3 : String + f4 : String + f5 : String + f6 : String + f7 : String + f8 : String + f9 : String + f10 : String + products(howMany : Int = 5): [Product] + expensiveProducts(howMany : Int = 5, howLong : Int = 0): [Product] } type Product { id: ID! name: String! + f1 : String + f2 : String + f3 : String + f4 : String + f5 : String + f6 : String + f7 : String + f8 : String + f9 : String + f10 : String }