Commit a1fd32d

Browse files
committed
disable star paths, add basic create mutations
1 parent 27912b9 commit a1fd32d

11 files changed: +144 -58 lines changed

packages/core/package.json

Lines changed: 1 addition & 0 deletions
@@ -35,6 +35,7 @@
     "lodash-es": "4.17.21",
     "lru-cache": "7.13.2",
     "matcher": "5.0.0",
+    "nanoid": "4.0.0",
     "plur": "5.1.0"
   },
   "devDependencies": {

packages/core/src/generators/collectionMutations.ts

Lines changed: 30 additions & 5 deletions
@@ -2,9 +2,9 @@ import { ObjectTypeComposer, SchemaComposer } from 'graphql-compose';
 import { merge } from 'lodash-es';
 import {
   CollectionContext,
+  CollectionEntry,
   EntryNode,
   LoadedFlatbreadConfig,
-  Transformer,
 } from '../types';

 export interface AddCollectionMutationsArgs {
@@ -13,7 +13,9 @@ export interface AddCollectionMutationsArgs {
   config: LoadedFlatbreadConfig;
   objectComposer: ObjectTypeComposer;
   schemaComposer: SchemaComposer;
+  collectionEntry: CollectionEntry;
   updateCollectionRecord: (
+    collection: CollectionEntry,
     entry: EntryNode & { _metadata: CollectionContext }
   ) => Promise<EntryNode>;
 }
@@ -28,16 +30,17 @@ export default function addCollectionMutations(
     objectComposer,
     schemaComposer,
     updateCollectionRecord,
+    collectionEntry,
   } = args;

   schemaComposer.Mutation.addFields({
     [`update${name}`]: {
       type: objectComposer,
-      args: { [name]: objectComposer.getInputType() },
-      description: `Update or create a ${name}`,
+      args: { [name]: objectComposer.getInputTypeComposer() },
+      description: `Update a ${name}`,
       async resolve(source, payload) {
         // remove _metadata to prevent injection
-        const { _metadata, ...update } = source.author;
+        const { _metadata, ...update } = payload[name];

         const targetRecord = objectComposer
           .getResolver('findById')
@@ -47,10 +50,32 @@ export default function addCollectionMutations(
         delete update[targetRecord._metadata.referenceField];
         const newRecord = merge(targetRecord, update);

-        await updateCollectionRecord(newRecord);
+        await updateCollectionRecord(collectionEntry, newRecord);

         return newRecord;
       },
     },
+    [`create${name}`]: {
+      type: objectComposer,
+      args: {
+        [name]: objectComposer
+          .getInputTypeComposer()
+          .clone(`${name}CreateInput`)
+          .removeField('id'),
+      },
+      description: `Create a ${name}`,
+      async resolve(source, payload, args) {
+        const record = merge(payload[name], {
+          _metadata: {
+            referenceField: collectionEntry.referenceField ?? 'id',
+            collection: name,
+            transformedBy: collectionEntry?.defaultTransformer,
+            sourcedBy: collectionEntry?.defaultSource,
+          },
+        });
+
+        return await updateCollectionRecord(collectionEntry, record);
+      },
+    },
   });
 }
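
A note on the create mutation's input type: it is derived by cloning the collection's generated input type composer and stripping the reference field, so clients cannot supply their own id. A minimal sketch of that graphql-compose pattern, using a hypothetical Author type (field names are placeholders, not taken from this commit):

import { schemaComposer } from 'graphql-compose';

// Hypothetical collection type standing in for a generated Flatbread collection.
const AuthorTC = schemaComposer.createObjectTC({
  name: 'Author',
  fields: { id: 'ID!', name: 'String', sitting: 'Int' },
});

// Same pattern as the create${name} args above: derive the input type,
// clone it under a dedicated name, and drop the id field.
const AuthorCreateInput = AuthorTC.getInputTypeComposer()
  .clone('AuthorCreateInput')
  .removeField('id');

console.log(AuthorCreateInput.getFieldNames()); // ['name', 'sitting']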

packages/core/src/generators/schema.ts

Lines changed: 31 additions & 11 deletions
@@ -1,6 +1,7 @@
 import { schemaComposer } from 'graphql-compose';
 import { composeWithJson } from 'graphql-compose-json';
-import { defaultsDeep, get, merge } from 'lodash-es';
+import { defaultsDeep, get, merge, set } from 'lodash-es';
+import { nanoid } from 'nanoid';
 import plur from 'plur';
 import { VFile } from 'vfile';

@@ -12,6 +13,7 @@ import {
   LoadedFlatbreadConfig,
   Source,
   Transformer,
+  CollectionEntry,
 } from '../types';
 import { getFieldOverrides } from '../utils/fieldOverrides';
 import { map } from '../utils/map';
@@ -108,18 +110,33 @@ export async function generateSchema(
     undefined: config.transformer[0],
   };

-  async function updateCollectionRecord(entry: EntryNode & { _metadata: any }) {
-    const { _metadata: ctx, ...record } = entry;
-    const { serialize } = transformersById[ctx.transformedBy];
+  async function updateCollectionRecord(
+    collection: CollectionEntry,
+    entry: EntryNode & { _metadata: any }
+  ) {
+    const ctx = entry._metadata;
+    const { serialize, id: transformerId } =
+      transformersById[ctx.transformedBy];
+
+    if (ctx.reference) {
+      const index = allContentNodesJSON[ctx.collection].findIndex(
+        (c) => get(c, ctx.referenceField) === ctx.reference
+      );
+
+      if (index < 0) throw new Error('Failed to find record to update');
+      // replace in memory representation of record
+      allContentNodesJSON[ctx.collection][index] = entry;
+    } else {
+      entry._metadata.reference = nanoid();
+      set(entry, entry._metadata.referenceField, entry._metadata.reference);
+      entry._metadata.transformedBy = transformerId;
+      allContentNodesJSON[ctx.collection].push(entry);
+    }
+
+    const { _metadata, ...record } = entry;
     const file = await serialize(record, ctx.transformContext);
+    await config?.source.put(file, ctx.sourceContext, ctx);

-    await config?.source.put(file, ctx.sourceContext);
-    const index = allContentNodesJSON[ctx.collection].findIndex(
-      (c) => get(c, ctx.referenceField) === ctx.reference
-    );
-
-    // replace in memory representation of record
-    allContentNodesJSON[ctx.collection][index] = entry;
     return entry;
   }

@@ -143,6 +160,8 @@ export async function generateSchema(
     /// Query resolvers
     //

+    // TODO: add a new type of plugin that can add resolvers to each collection, they should be called here
+
     addCollectionQueries({
       name,
       pluralName,
@@ -160,6 +179,7 @@
       schemaComposer,
       updateCollectionRecord,
       config,
+      collectionEntry: config.content.find((c) => c.name === name),
     });
   }

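The reworked updateCollectionRecord is effectively an upsert: an entry whose _metadata already carries a reference replaces the matching in-memory node, while a new entry is given a nanoid reference, has that reference mirrored onto its reference field, and is appended. A self-contained sketch of that branching against a plain array (the store and field names are stand-ins, not the real allContentNodesJSON):

import { get, set } from 'lodash-es';
import { nanoid } from 'nanoid';

type Entry = Record<string, any> & {
  _metadata: { reference?: string; referenceField: string };
};

const store: Entry[] = []; // stand-in for allContentNodesJSON[collection]

function upsert(entry: Entry): Entry {
  const ctx = entry._metadata;

  if (ctx.reference) {
    // existing record: replace it in the in-memory representation
    const index = store.findIndex((c) => get(c, ctx.referenceField) === ctx.reference);
    if (index < 0) throw new Error('Failed to find record to update');
    store[index] = entry;
  } else {
    // new record: mint a reference and mirror it onto the reference field
    ctx.reference = nanoid();
    set(entry, ctx.referenceField, ctx.reference);
    store.push(entry);
  }

  return entry;
}

upsert({ _metadata: { referenceField: 'id' }, name: 'Eva' }); // takes the create path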

packages/core/src/providers/test/base.test.ts

Lines changed: 37 additions & 1 deletion
@@ -66,7 +66,7 @@ test('update collection record', async (t) => {
   const flatbread = basicProject();
   const sitting = (Math.random() * 100) | 0;
   const result: any = await flatbread.query({
-    rootValue: { author: { id: '2a3e', skills: { sitting } } },
+    variableValues: { author: { id: '2a3e', skills: { sitting } } },
     source: `
       mutation UpdateAuthor($author: AuthorInput){
         updateAuthor(Author: $author) {
@@ -96,3 +96,39 @@ test('update collection record', async (t) => {

   t.is(updated.data.Author.skills.sitting, sitting);
 });
+
+test('create collection record', async (t) => {
+  const flatbread = basicProject();
+  const sitting = 69;
+  const result: any = await flatbread.query({
+    variableValues: { test: { skills: { sitting } } },
+    source: `
+      mutation CreateAuthor($test: AuthorCreateInput){
+        createAuthor(Author: $test) {
+          id
+          skills {
+            sitting
+          }
+        }
+      }
+    `,
+  });
+
+  t.is(result.data.createAuthor.skills.sitting, sitting);
+
+  const updated: any = await flatbread.query({
+    variableValues: { id: result.data.createAuthor.id },
+    source: `
+      query QueryAuthor($id: String) {
+        Author(id: $id) {
+          id
+          skills {
+            sitting
+          }
+        }
+      }
+    `,
+  });
+
+  t.is(updated.data.Author.skills.sitting, sitting);
+});

packages/core/src/sources/virtual.ts

Lines changed: 8 additions & 4 deletions
@@ -39,12 +39,16 @@ export class SourceVirtual implements Source<MemContext> {
     }
   }

-  async put(doc: VFile, context: MemContext) {
-    const record = this.data[context.collectionName].find(
-      (entry) => entry.path === context.id
+  async put(doc: VFile, context: MemContext, parentContext: any) {
+    const record = this.data[parentContext.collection].find(
+      (entry) => entry.path === parentContext.reference
     );

-    record.value = doc;
+    if (record) {
+      record.value = doc.value;
+    } else {
+      this.data[parentContext.collection].push(doc);
+    }

     return { doc, context };
   }

packages/core/src/types.ts

Lines changed: 4 additions & 1 deletion
@@ -83,7 +83,8 @@ export interface Source<Context> {
   id?: string;
   put: (
     source: VFile,
-    ctx: Context
+    ctx: Context,
+    parentContext: any
   ) => Promise<{ doc: VFile; context: Context }>;
   fetch: (
     allContentTypes: LoadedCollectionEntry[],
@@ -133,6 +134,8 @@ export interface CollectionEntry {
   overrides?: Override[];
   refs?: Record<string, string>;
   referenceField?: string;
+  defaultTransformer?: string;
+  defaultSource?: string;
 }

 export interface LoadedCollectionEntry extends CollectionEntry {
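
The two new optional fields let a collection declare which transformer and source should stamp records created through the new mutations. A hypothetical collection entry using them (the id strings and overall shape are illustrative assumptions, not confirmed by this commit):

// Hypothetical collection entry; other CollectionEntry fields omitted.
const authors = {
  name: 'Author',                             // matched via config.content.find((c) => c.name === name)
  referenceField: 'id',
  defaultTransformer: 'transformer-markdown', // assumed plugin ids — check the loaded plugins' actual ids
  defaultSource: 'source-filesystem',
};

These values end up in the _metadata (transformedBy / sourcedBy) that the create resolver merges onto a new record.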

packages/source-filesystem/src/index.ts

Lines changed: 7 additions & 18 deletions
@@ -1,15 +1,12 @@
-import slugify from '@sindresorhus/slugify';
-import { defaultsDeep, merge } from 'lodash-es';
-import { read, write } from 'to-vfile';
-import ownPackage from '../package.json' assert { type: 'json' };
 import type {
-  CollectionContext,
-  LoadedCollectionEntry,
   FlatbreadArgs,
+  LoadedCollectionEntry,
   LoadedFlatbreadConfig,
-  SourcePlugin,
 } from '@flatbread/core';
+import slugify from '@sindresorhus/slugify';
+import { defaultsDeep } from 'lodash-es';
 import { relative, resolve } from 'path';
+import { read, write } from 'to-vfile';
 import type { VFile } from 'vfile';
 import type {
   FileNode,
@@ -65,7 +62,7 @@ async function getAllNodes(
   flatbread: FlatbreadArgs<Context>,
   config: InitializedSourceFilesystemConfig
 ): Promise<void> {
-  const nodeEntries = await Promise.all(
+  await Promise.all(
     allCollectionEntries.map(
       async (contentType): Promise<Record<string, any>> =>
         new Promise(async (res) =>
@@ -76,18 +73,10 @@ async function getAllNodes(
       )
     )
   );
-
-  const nodes = Object.fromEntries(
-    nodeEntries as Iterable<readonly [PropertyKey, any]>
-  );
 }

-// TODO: _flatbread data should be extracted from plugins
-// plugin should return a context object and be given the same context object back when saving,
-// this context object will be saved internally under _flatbread[collectionId]
-
-async function put(doc: VFile, context: Context) {
-  doc.basename = context.filename;
+async function put(doc: VFile, context: Context, parentContext: any) {
+  doc.basename = context?.filename ?? parentContext.reference;
   doc.path = resolve(process.cwd(), context.path);

   await write(doc);
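
For records created through the new mutations there is no per-file context yet, so put falls back to the reference minted in parentContext when choosing a basename. A tiny sketch of that fallback (the reference value is a made-up nanoid; the real put also resolves a path and writes the file):

import { VFile } from 'vfile';

function basenameFor(
  doc: VFile,
  context: { filename?: string } | undefined,
  parentContext: { reference: string }
) {
  // same fallback as put(): prefer the existing file's name, else the minted reference
  doc.basename = context?.filename ?? parentContext.reference;
  return doc.basename;
}

basenameFor(new VFile('---\nname: Eva\n---\n'), undefined, { reference: 'V1StGXR8' });
// => 'V1StGXR8'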

packages/source-filesystem/src/utils/gatherFileNodes.ts

Lines changed: 1 addition & 1 deletion
@@ -51,7 +51,7 @@ export default async function gatherFileNodes(
   ) ?? ['.md', '.mdx', '.markdown'];

   // gather all the globs in the path ( [capture-groups], **, *)
-  const [pathPrefix, ...globs] = path.split(/\/(?:\[|\*+)/);
+  const [pathPrefix, ...globs] = path.split(/\/(?:\[)/);

   // for each segment - gather names for capture groups
   // and calculate what to remove from matches ex: [name].md => remove .md from match
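
This is the "disable star paths" half of the commit: the path is now only split on "/[...]" capture-group segments, so "*" and "**" wildcards no longer open a glob segment. A quick comparison of the two regexes (paths are illustrative):

const oldSplit = (path: string) => path.split(/\/(?:\[|\*+)/);
const newSplit = (path: string) => path.split(/\/(?:\[)/);

// Capture-group paths split the same way under both:
oldSplit('content/authors/[slug].md'); // ['content/authors', 'slug].md']
newSplit('content/authors/[slug].md'); // ['content/authors', 'slug].md']

// Star paths stop being treated as glob segments:
oldSplit('content/deeply/**/*.md'); // ['content/deeply', '', '.md']
newSplit('content/deeply/**/*.md'); // ['content/deeply/**/*.md']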

packages/source-filesystem/src/utils/tests/gatherFileNodes.test.ts

Lines changed: 16 additions & 16 deletions
@@ -32,20 +32,20 @@ test('basic case', async (t) => {
   t.snapshot(result2);
 });

-test('double level recursion', async (t) => {
-  const result = await gatherFileNodes('deeply/**/*.md', opts);
-  t.snapshot(result);
-});
+// test('double level recursion', async (t) => {
+//   const result = await gatherFileNodes('deeply/**/*.md', opts);
+//   t.snapshot(result);
+// });

 test('double level recursion named', async (t) => {
   const result = await gatherFileNodes('deeply/[a]/[b].md', opts);
   t.snapshot(result);
 });

-test('single level recursion', async (t) => {
-  const result = await gatherFileNodes('./*.md', opts as any);
-  t.snapshot(result);
-});
+// test('single level recursion', async (t) => {
+//   const result = await gatherFileNodes('./*.md', opts as any);
+//   t.snapshot(result);
+// });

 test('double level recursion named without parent directory', async (t) => {
   const result = await gatherFileNodes('./[genre]/[title].md', opts);
@@ -57,15 +57,15 @@ test('single level named', async (t) => {
   t.snapshot(result);
 });

-test('double level first named', async (t) => {
-  const result = await gatherFileNodes('./[genre]/*.md', opts);
-  t.snapshot(result);
-});
+// test('double level first named', async (t) => {
+//   const result = await gatherFileNodes('./[genre]/*.md', opts);
+//   t.snapshot(result);
+// });

-test('double level second named', async (t) => {
-  const result = await gatherFileNodes('./**/[title].md', opts);
-  t.snapshot(result);
-});
+// test('double level second named', async (t) => {
+//   const result = await gatherFileNodes('./**/[title].md', opts);
+//   t.snapshot(result);
+// });

 test('triple level', async (t) => {
   const result = await gatherFileNodes('./[random]/[name]/[title].md', opts);

packages/transformer-markdown/src/index.ts

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ function serialize(
   config: MarkdownTransformerConfig
 ) {
   const { _content, ...rest } = data;
-  const doc = matter.stringify(_content.raw, rest, config.grayMatter);
+  const doc = matter.stringify(_content?.raw ?? '', rest, config.grayMatter);

   return new VFile(doc);
 }
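
Newly created records may not carry a _content body yet, so serialize now defaults to an empty string rather than throwing on _content.raw. Roughly what that produces with gray-matter (the data here is made up):

import matter from 'gray-matter';

const data: Record<string, any> = { name: 'Eva', skills: { sitting: 69 } }; // no _content field
const { _content, ...rest } = data;

// same guard as the diff: empty markdown body when there is no raw content yet
const doc = matter.stringify(_content?.raw ?? '', rest);
// ---
// name: Eva
// skills:
//   sitting: 69
// ---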
