simplify CollectFields for @defer and @stream (graphql#3994)
minimizes the changes to `CollectFields` required for incremental delivery (inspired by graphql#3982) -- but retains a single memoized incremental field plan per list item.
yaacovCR committed Dec 16, 2023
1 parent 688ee2f commit 6c701c7
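
As a rough sketch of the split this commit makes, the snippet below shows how a caller might feed already-collected fields into the new buildFieldPlan pass added in src/execution/buildFieldPlan.ts (shown later in this diff). The planForSelection helper and the shape of the fields map are assumptions for illustration only; the real call sites in execute.ts are not part of this excerpt.

import type { DeferUsage, FieldDetails } from './collectFields.js';
import { buildFieldPlan } from './buildFieldPlan.js';

// Hypothetical helper: `fields` is assumed to be the response-key to
// field-details map produced by field collection. Planning for @defer
// now happens in this separate pass rather than inside CollectFields.
function planForSelection(
  fields: Map<string, ReadonlyArray<FieldDetails>>,
  parentDeferUsages?: ReadonlySet<DeferUsage>,
) {
  const { groupedFieldSet, newGroupedFieldSetDetailsMap, newDeferUsages } =
    buildFieldPlan(fields, parentDeferUsages);

  // `groupedFieldSet` executes with the current payload; each entry of
  // `newGroupedFieldSetDetailsMap` becomes its own deferred payload, and
  // `shouldInitiateDefer` marks whether it introduces a new defer point.
  return { groupedFieldSet, newGroupedFieldSetDetailsMap, newDeferUsages };
}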
Showing 6 changed files with 364 additions and 329 deletions.
46 changes: 25 additions & 21 deletions src/execution/IncrementalPublisher.ts
@@ -8,7 +8,7 @@ import type {
GraphQLFormattedError,
} from '../error/GraphQLError.js';

import type { GroupedFieldSet } from './collectFields.js';
import type { GroupedFieldSet } from './buildFieldPlan.js';

interface IncrementalUpdate<TData = unknown, TExtensions = ObjMap<unknown>> {
pending: ReadonlyArray<PendingResult>;
@@ -301,25 +301,20 @@ export class IncrementalPublisher {
initialResultRecord: InitialResultRecord,
data: ObjMap<unknown> | null,
): ExecutionResult | ExperimentalIncrementalExecutionResults {
const pendingSources = new Set<DeferredFragmentRecord | StreamRecord>();
for (const child of initialResultRecord.children) {
if (child.filtered) {
continue;
}
this._publish(child);
const maybePendingSource = this._publish(child);
if (maybePendingSource) {
pendingSources.add(maybePendingSource);
}
}

const errors = initialResultRecord.errors;
const initialResult = errors.length === 0 ? { data } : { errors, data };
const pending = this._pending;
if (pending.size > 0) {
const pendingSources = new Set<DeferredFragmentRecord | StreamRecord>();
for (const subsequentResultRecord of pending) {
const pendingSource = isStreamItemsRecord(subsequentResultRecord)
? subsequentResultRecord.streamRecord
: subsequentResultRecord;
pendingSources.add(pendingSource);
}

if (pendingSources.size > 0) {
return {
initialResult: {
...initialResult,
Expand Down Expand Up @@ -542,13 +537,10 @@ export class IncrementalPublisher {
if (child.filtered) {
continue;
}
const pendingSource = isStreamItemsRecord(child)
? child.streamRecord
: child;
if (!pendingSource.pendingSent) {
newPendingSources.add(pendingSource);
const maybePendingSource = this._publish(child);
if (maybePendingSource) {
newPendingSources.add(maybePendingSource);
}
this._publish(child);
}
if (isStreamItemsRecord(subsequentResultRecord)) {
if (subsequentResultRecord.isFinalRecord) {
@@ -655,14 +647,20 @@
return result;
}

private _publish(subsequentResultRecord: SubsequentResultRecord): void {
private _publish(
subsequentResultRecord: SubsequentResultRecord,
): DeferredFragmentRecord | StreamRecord | undefined {
if (isStreamItemsRecord(subsequentResultRecord)) {
if (subsequentResultRecord.isCompleted) {
this._push(subsequentResultRecord);
return;
} else {
this._introduce(subsequentResultRecord);
}

this._introduce(subsequentResultRecord);
const stream = subsequentResultRecord.streamRecord;
if (!stream.pendingSent) {
return stream;
}
return;
}

@@ -673,6 +671,12 @@
subsequentResultRecord.children.size > 0
) {
this._push(subsequentResultRecord);
} else {
return;
}

if (!subsequentResultRecord.pendingSent) {
return subsequentResultRecord;
}
}

54 changes: 54 additions & 0 deletions src/execution/__tests__/defer-test.ts
@@ -64,6 +64,7 @@ const anotherNestedObject = new GraphQLObjectType({

const hero = {
name: 'Luke',
lastName: 'SkyWalker',
id: 1,
friends,
nestedObject,
@@ -112,6 +113,7 @@ const heroType = new GraphQLObjectType({
fields: {
id: { type: GraphQLID },
name: { type: GraphQLString },
lastName: { type: GraphQLString },
nonNullName: { type: new GraphQLNonNull(GraphQLString) },
friends: {
type: new GraphQLList(friendType),
@@ -566,6 +568,58 @@ describe('Execute: defer directive', () => {
]);
});

it('Separately emits defer fragments with different labels with varying subfields with superimposed masked defer', async () => {
const document = parse(`
query HeroNameQuery {
... @defer(label: "DeferID") {
hero {
id
}
}
... @defer(label: "DeferName") {
hero {
name
lastName
... @defer {
lastName
}
}
}
}
`);
const result = await complete(document);
expectJSON(result).toDeepEqual([
{
data: {},
pending: [
{ id: '0', path: [], label: 'DeferID' },
{ id: '1', path: [], label: 'DeferName' },
],
hasNext: true,
},
{
incremental: [
{
data: { hero: {} },
id: '0',
},
{
data: { id: '1' },
id: '0',
subPath: ['hero'],
},
{
data: { name: 'Luke', lastName: 'SkyWalker' },
id: '1',
subPath: ['hero'],
},
],
completed: [{ id: '0' }, { id: '1' }],
hasNext: false,
},
]);
});

it('Separately emits defer fragments with different labels with varying subfields that return promises', async () => {
const document = parse(`
query HeroNameQuery {
165 changes: 165 additions & 0 deletions src/execution/buildFieldPlan.ts
@@ -0,0 +1,165 @@
import { getBySet } from '../jsutils/getBySet.js';
import { isSameSet } from '../jsutils/isSameSet.js';

import type { DeferUsage, FieldDetails } from './collectFields.js';

export type DeferUsageSet = ReadonlySet<DeferUsage>;

export interface FieldGroup {
fields: ReadonlyArray<FieldDetails>;
deferUsages?: DeferUsageSet | undefined;
knownDeferUsages?: DeferUsageSet | undefined;
}

export type GroupedFieldSet = Map<string, FieldGroup>;

export interface NewGroupedFieldSetDetails {
groupedFieldSet: GroupedFieldSet;
shouldInitiateDefer: boolean;
}

export function buildFieldPlan(
fields: Map<string, ReadonlyArray<FieldDetails>>,
parentDeferUsages: DeferUsageSet = new Set<DeferUsage>(),
knownDeferUsages: DeferUsageSet = new Set<DeferUsage>(),
): {
groupedFieldSet: GroupedFieldSet;
newGroupedFieldSetDetailsMap: Map<DeferUsageSet, NewGroupedFieldSetDetails>;
newDeferUsages: ReadonlyArray<DeferUsage>;
} {
const newDeferUsages: Set<DeferUsage> = new Set<DeferUsage>();
const newKnownDeferUsages = new Set<DeferUsage>(knownDeferUsages);

const groupedFieldSet = new Map<
string,
{
fields: Array<FieldDetails>;
deferUsages: DeferUsageSet;
knownDeferUsages: DeferUsageSet;
}
>();

const newGroupedFieldSetDetailsMap = new Map<
DeferUsageSet,
{
groupedFieldSet: Map<
string,
{
fields: Array<FieldDetails>;
deferUsages: DeferUsageSet;
knownDeferUsages: DeferUsageSet;
}
>;
shouldInitiateDefer: boolean;
}
>();

const map = new Map<
string,
{
deferUsageSet: DeferUsageSet;
fieldDetailsList: ReadonlyArray<FieldDetails>;
}
>();

for (const [responseKey, fieldDetailsList] of fields) {
const deferUsageSet = new Set<DeferUsage>();
let inOriginalResult = false;
for (const fieldDetails of fieldDetailsList) {
const deferUsage = fieldDetails.deferUsage;
if (deferUsage === undefined) {
inOriginalResult = true;
continue;
}
deferUsageSet.add(deferUsage);
if (!knownDeferUsages.has(deferUsage)) {
newDeferUsages.add(deferUsage);
newKnownDeferUsages.add(deferUsage);
}
}
if (inOriginalResult) {
deferUsageSet.clear();
} else {
deferUsageSet.forEach((deferUsage) => {
const ancestors = getAncestors(deferUsage);
for (const ancestor of ancestors) {
if (deferUsageSet.has(ancestor)) {
deferUsageSet.delete(deferUsage);
}
}
});
}
map.set(responseKey, { deferUsageSet, fieldDetailsList });
}

for (const [responseKey, { deferUsageSet, fieldDetailsList }] of map) {
if (isSameSet(deferUsageSet, parentDeferUsages)) {
let fieldGroup = groupedFieldSet.get(responseKey);
if (fieldGroup === undefined) {
fieldGroup = {
fields: [],
deferUsages: deferUsageSet,
knownDeferUsages: newKnownDeferUsages,
};
groupedFieldSet.set(responseKey, fieldGroup);
}
fieldGroup.fields.push(...fieldDetailsList);
continue;
}

let newGroupedFieldSetDetails = getBySet(
newGroupedFieldSetDetailsMap,
deferUsageSet,
);
let newGroupedFieldSet;
if (newGroupedFieldSetDetails === undefined) {
newGroupedFieldSet = new Map<
string,
{
fields: Array<FieldDetails>;
deferUsages: DeferUsageSet;
knownDeferUsages: DeferUsageSet;
}
>();

newGroupedFieldSetDetails = {
groupedFieldSet: newGroupedFieldSet,
shouldInitiateDefer: Array.from(deferUsageSet).some(
(deferUsage) => !parentDeferUsages.has(deferUsage),
),
};
newGroupedFieldSetDetailsMap.set(
deferUsageSet,
newGroupedFieldSetDetails,
);
} else {
newGroupedFieldSet = newGroupedFieldSetDetails.groupedFieldSet;
}
let fieldGroup = newGroupedFieldSet.get(responseKey);
if (fieldGroup === undefined) {
fieldGroup = {
fields: [],
deferUsages: deferUsageSet,
knownDeferUsages: newKnownDeferUsages,
};
newGroupedFieldSet.set(responseKey, fieldGroup);
}
fieldGroup.fields.push(...fieldDetailsList);
}

return {
groupedFieldSet,
newGroupedFieldSetDetailsMap,
newDeferUsages: Array.from(newDeferUsages),
};
}

function getAncestors(deferUsage: DeferUsage): ReadonlyArray<DeferUsage> {
const ancestors: Array<DeferUsage> = [];
let parentDeferUsage: DeferUsage | undefined = deferUsage.parentDeferUsage;
while (parentDeferUsage !== undefined) {
ancestors.unshift(parentDeferUsage);
parentDeferUsage = parentDeferUsage.parentDeferUsage;
}
return ancestors;
}
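
To make the reduction step above concrete: a field that also appears outside any defer keeps an empty defer-usage set (it stays in the current result), and a defer usage is dropped whenever one of its ancestors is also in the set, so the field is attributed to the outermost applicable defer. The snippet below restates that rule with a simplified stand-in type; the DeferUsage shape and the labels are illustrative only.

interface SimpleDeferUsage {
  label: string | undefined;
  parentDeferUsage: SimpleDeferUsage | undefined;
}

const outer: SimpleDeferUsage = { label: 'DeferName', parentDeferUsage: undefined };
const inner: SimpleDeferUsage = { label: undefined, parentDeferUsage: outer };

// A field selected under both the outer and the inner defer reduces to the
// outer usage only, mirroring the pruning loop in buildFieldPlan above.
const deferUsageSet = new Set<SimpleDeferUsage>([outer, inner]);
for (const usage of deferUsageSet) {
  let ancestor = usage.parentDeferUsage;
  while (ancestor !== undefined) {
    if (deferUsageSet.has(ancestor)) {
      deferUsageSet.delete(usage);
      break;
    }
    ancestor = ancestor.parentDeferUsage;
  }
}
// deferUsageSet now contains only `outer`, similar in spirit to the masked
// inner `... @defer { lastName }` exercised by the new defer-test above.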