diff --git a/examples/content/markdown/authors/Adam.md b/examples/content/markdown/authors/Adam.md
new file mode 100644
index 00000000..bf1b5e25
--- /dev/null
+++ b/examples/content/markdown/authors/Adam.md
@@ -0,0 +1,8 @@
+---
+name: Adam
+friend: Amanda
+enjoys:
+ - cats
+ - coding
+ - flatbread
+---
diff --git a/examples/content/markdown/authors/Amanda.md b/examples/content/markdown/authors/Amanda.md
new file mode 100644
index 00000000..7aaf7743
--- /dev/null
+++ b/examples/content/markdown/authors/Amanda.md
@@ -0,0 +1,7 @@
+---
+name: Amanda
+enjoys:
+ - cats
+ - coding
+ - flatbread
+---
diff --git a/examples/content/markdown/authors/daes.md b/examples/content/markdown/authors/daes.md
index 029219a5..3de9c976 100644
--- a/examples/content/markdown/authors/daes.md
+++ b/examples/content/markdown/authors/daes.md
@@ -6,7 +6,7 @@ enjoys:
- cats
- coffee
- design
-friend: 40s3
+friend: Eva
date_joined: 2021-04-22T16:41:59.558Z
skills:
sitting: 304
diff --git a/examples/content/markdown/authors/eva.md b/examples/content/markdown/authors/eva.md
index 52b4e0d8..6432e934 100644
--- a/examples/content/markdown/authors/eva.md
+++ b/examples/content/markdown/authors/eva.md
@@ -8,7 +8,7 @@ enjoys:
- mow mow
- sleepy time
- attention
-friend: 2a3e
+friend: Tony
image: eva.svg
date_joined: 2002-02-25T16:41:59.558Z
skills:
diff --git a/examples/content/markdown/authors/tony.md b/examples/content/markdown/authors/tony.md
index 416dd8ba..1cdf4d44 100644
--- a/examples/content/markdown/authors/tony.md
+++ b/examples/content/markdown/authors/tony.md
@@ -6,7 +6,7 @@ enjoys:
- cats
- tea
- making this
-friend: 40s3
+friend: Eva
image: tony.svg
date_joined: 2021-02-25T16:41:59.558Z
skills:
diff --git a/examples/content/markdown/authors/yoshi.md b/examples/content/markdown/authors/yoshi.md
index 047f60fe..fbcd95c4 100644
--- a/examples/content/markdown/authors/yoshi.md
+++ b/examples/content/markdown/authors/yoshi.md
@@ -7,7 +7,7 @@ enjoys:
- encroaching upon personal space
- being concerned
- smooth jazz
-friend: ab2c
+friend: Daes
date_joined: 2018-10-25T16:23:59.558Z
skills:
sitting: 10
diff --git a/examples/content/markdown/posts/anotha-one.md b/examples/content/markdown/posts/anotha-one.md
index 71175a86..358649bd 100644
--- a/examples/content/markdown/posts/anotha-one.md
+++ b/examples/content/markdown/posts/anotha-one.md
@@ -2,8 +2,8 @@
id: 92348fds-453fdh-59ddsd-3332-09876
title: 'Test post A'
authors:
- - 40s3
- - 2a3e
+ - Eva
+ - Tony
rating: 84.3
---
diff --git a/examples/content/markdown/posts/b.md b/examples/content/markdown/posts/b.md
index 4287b935..4ec67703 100644
--- a/examples/content/markdown/posts/b.md
+++ b/examples/content/markdown/posts/b.md
@@ -2,8 +2,8 @@
id: 2348fds-563fdh-59ddsd-3332-09876
title: 'Test post B'
authors:
- - 1111
- - ab2c
+ - Ushi
+ - Daes
rating: 44
---
diff --git a/examples/content/markdown/posts/example-post.md b/examples/content/markdown/posts/example-post.md
index c57b41f8..7e08b8fd 100644
--- a/examples/content/markdown/posts/example-post.md
+++ b/examples/content/markdown/posts/example-post.md
@@ -2,8 +2,8 @@
id: sdfsdf-23423-sdfsd-23444-dfghf
title: 'Example post of things'
authors:
- - 2a3e
- - 40s3
+ - Tony
+ - Eva
rating: 74
---
diff --git a/examples/content/markdown/posts/soup.md b/examples/content/markdown/posts/soup.md
index 1425e759..0ed9154b 100644
--- a/examples/content/markdown/posts/soup.md
+++ b/examples/content/markdown/posts/soup.md
@@ -2,8 +2,7 @@
id: jksfd4-234fdh-5345fj-3455-09836
title: 'Soup Reflection'
authors:
- - r3c6
- - ab2c
+ - Yoshi
rating: 96
---
diff --git a/examples/nextjs/flatbread.config.ts b/examples/nextjs/flatbread.config.ts
index 1d1aa896..d2ae9788 100644
--- a/examples/nextjs/flatbread.config.ts
+++ b/examples/nextjs/flatbread.config.ts
@@ -19,21 +19,21 @@ export default defineConfig({
content: [
{
path: 'content/markdown/posts',
- collection: 'Post',
+ name: 'Post',
refs: {
authors: 'Author',
},
},
{
path: 'content/markdown/posts/[category]/[slug].md',
- collection: 'PostCategory',
+ name: 'PostCategory',
refs: {
authors: 'Author',
},
},
{
path: 'content/markdown/posts/**/*.md',
- collection: 'PostCategoryBlob',
+ name: 'PostCategoryBlob',
refs: {
authors: 'Author',
},
diff --git a/examples/sveltekit/flatbread.config.js b/examples/sveltekit/flatbread.config.js
index 39c65f27..4bdc882c 100644
--- a/examples/sveltekit/flatbread.config.js
+++ b/examples/sveltekit/flatbread.config.js
@@ -19,28 +19,22 @@ export default defineConfig({
content: [
{
path: 'content/markdown/posts',
- collection: 'Post',
+ name: 'Post',
refs: {
authors: 'Author',
},
},
{
path: 'content/markdown/posts/[category]/[slug].md',
- collection: 'PostCategory',
- refs: {
- authors: 'Author',
- },
- },
- {
- path: 'content/markdown/posts/**/*.md',
- collection: 'PostCategoryBlob',
+ name: 'PostCategory',
refs: {
authors: 'Author',
},
},
{
path: 'content/markdown/authors',
- collection: 'Author',
+ name: 'Author',
+ referenceField: 'name',
refs: {
friend: 'Author',
},
@@ -54,20 +48,18 @@ export default defineConfig({
},
{
path: 'content/yaml/authors',
- collection: 'YamlAuthor',
+ name: 'YamlAuthor',
refs: {
friend: 'YamlAuthor',
},
},
{
path: 'content/markdown/deeply-nested',
- collection: 'OverrideTest',
+ name: 'OverrideTest',
overrides: [
{
field: 'deeply.nested',
type: 'String',
- test: undefined,
- test2: null,
resolve: (source) => String(source).toUpperCase(),
},
{
diff --git a/examples/sveltekit/src/routes/__layout.svelte b/examples/sveltekit/src/routes/+layout.svelte
similarity index 100%
rename from examples/sveltekit/src/routes/__layout.svelte
rename to examples/sveltekit/src/routes/+layout.svelte
diff --git a/examples/sveltekit/src/routes/+page.svelte b/examples/sveltekit/src/routes/+page.svelte
new file mode 100644
index 00000000..ba092e91
--- /dev/null
+++ b/examples/sveltekit/src/routes/+page.svelte
@@ -0,0 +1,55 @@
+
+
+
+
+
+
+ {JSON.stringify(data, null, 2)}
+
+
+
+
+ {#each data.allPostCategories as post, _ (post.id)}
+
+ {post.title}
+
+ -
+
+ {#each post.authors as author}
+
+ {/each}
+
+
+ -
+ Rating: {post.rating}
+
+
+ {@html post._content.html}
+
+ {/each}
+
+
diff --git a/examples/sveltekit/src/routes/+page.ts b/examples/sveltekit/src/routes/+page.ts
new file mode 100644
index 00000000..63210f5b
--- /dev/null
+++ b/examples/sveltekit/src/routes/+page.ts
@@ -0,0 +1,79 @@
+import { error } from '@sveltejs/kit';
+
+export const load = async ({ fetch }) => {
+ const query = `
+ query PostCategory {
+ allPostCategories (sortBy: "title", order: DESC) {
+ _metadata {
+ sourceContext {
+ filename
+ slug
+ }
+ collection
+ }
+ id
+ title
+ category
+ slug
+ rating
+ _content {
+ raw
+ html
+ excerpt
+ timeToRead
+ }
+ authors {
+ _metadata {
+ sourceContext {
+ slug
+ }
+ }
+ id
+ name
+ entity
+ enjoys
+ image {
+ srcset
+ srcsetwebp
+ srcsetavif
+ placeholder
+ aspectratio
+ }
+ friend {
+ name
+ date_joined
+ }
+ date_joined
+ skills {
+ sitting
+ breathing
+ liquid_consumption
+ existence
+ sports
+ }
+ }
+ }
+ }
+ `;
+
+ try {
+ const response = await fetch('http://localhost:5057/graphql', {
+ body: JSON.stringify({
+ query,
+ }),
+ headers: {
+ 'Content-Type': 'application/json',
+ Accept: 'application/json',
+ },
+ method: 'POST',
+ });
+
+ const { data, errors } = await response.json();
+
+ if (errors)
+ throw error(500, errors.map(({ message }) => message).join('\\n'));
+ return data;
+ } catch (e) {
+ throw error(500, 'Failed to load data');
+ }
+};
diff --git a/examples/sveltekit/src/routes/index.svelte b/examples/sveltekit/src/routes/index.svelte
deleted file mode 100644
index 67cd7ec2..00000000
--- a/examples/sveltekit/src/routes/index.svelte
+++ /dev/null
@@ -1,135 +0,0 @@
-
-
-
-
-
-
-
-
- {JSON.stringify(data, null, 2)}
-
-
-
-
- {#each data.allPostCategories as post, _ (post.id)}
-
- {post.title}
-
- -
-
- {#each post.authors as author}
-
- {/each}
-
-
- -
- Rating: {post.rating}
-
-
- {@html post._content.html}
-
- {/each}
-
-
diff --git a/package.json b/package.json
index d8d33df0..f47015b7 100644
--- a/package.json
+++ b/package.json
@@ -16,7 +16,7 @@
"lint:prettier": "prettier --check --plugin-search-dir=. .",
"lint": "pnpm lint:prettier",
"lint:fix": "pnpm lint:fix:prettier",
- "lint:fix:prettier": "pretty-quick --staged",
+ "lint:fix:prettier": "prettier --write --plugin-search-dir=. .",
"play": "cd examples/sveltekit && pnpm dev",
"play:build": "pnpm build && cd examples/sveltekit && pnpm build",
"prepublish:ci": "pnpm -r update",
diff --git a/packages/core/package.json b/packages/core/package.json
index b49d91f8..446e7d98 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -35,6 +35,7 @@
"lodash-es": "4.17.21",
"lru-cache": "7.13.2",
"matcher": "5.0.0",
+ "nanoid": "4.0.0",
"plur": "5.1.0"
},
"devDependencies": {
diff --git a/packages/core/src/cache/cache.ts b/packages/core/src/cache/cache.ts
index 6d40d382..fe8fc54c 100644
--- a/packages/core/src/cache/cache.ts
+++ b/packages/core/src/cache/cache.ts
@@ -1,8 +1,7 @@
import { GraphQLSchema } from 'graphql';
import LRU from 'lru-cache';
-import { createHash } from 'node:crypto';
import { LoadedFlatbreadConfig } from '../types';
-import { anyToString } from '../utils/stringUtils';
+import createShaHash from '../utils/createShaHash';
type SchemaCacheKey = string;
@@ -29,7 +28,7 @@ export function cacheSchema(
config: LoadedFlatbreadConfig,
schema: GraphQLSchema
) {
- const schemaHashKey = getSchemaHash(config);
+ const schemaHashKey = createShaHash(config);
cache.schema.set(schemaHashKey, schema);
}
@@ -39,13 +38,6 @@ export function cacheSchema(
export function checkCacheForSchema(
config: LoadedFlatbreadConfig
): GraphQLSchema | undefined {
- const schemaHashKey = getSchemaHash(config);
+ const schemaHashKey = createShaHash(config);
return cache.schema.get(schemaHashKey);
}
-
-/**
- * Generates a hash key for a given Flatbread config.
- */
-export function getSchemaHash(config: LoadedFlatbreadConfig) {
- return createHash('md5').update(anyToString(config)).digest('hex');
-}
diff --git a/packages/core/src/errors.ts b/packages/core/src/errors.ts
index 106e5e96..f36a1d19 100644
--- a/packages/core/src/errors.ts
+++ b/packages/core/src/errors.ts
@@ -2,8 +2,7 @@ import { outdent } from './utils/outdent';
export class IllegalFieldNameError extends Error {
constructor(illegalSequence: string) {
- super();
- this.message = outdent`
+ super(outdent`
The sequence "${illegalSequence}" is reserved and not allowed in field names
Either:
- remove all instances of "${illegalSequence}" in the names of fields in your content
@@ -13,6 +12,24 @@ export class IllegalFieldNameError extends Error {
...,
fieldNameTransform: (value) => value.replaceAll("${illegalSequence}",'-')
}
- `;
+ `);
+ }
+}
+
+export class ReferenceAlreadyExistsError<
+ K extends Record
+> extends Error {
+ constructor(
+ payload: K,
+ collectionName: string,
+ metadata: { referenceField: string; reference: string }
+ ) {
+ const payloadString = JSON.stringify(payload, null, 2);
+ super(
+ outdent`
+ Failed to create
+ ${payloadString}
+ ${collectionName} with ${metadata.referenceField} of ${metadata.reference} already exists`
+ );
}
}
diff --git a/packages/core/src/generators/arguments.ts b/packages/core/src/generators/arguments.ts
index f6c1ded9..818d636a 100644
--- a/packages/core/src/generators/arguments.ts
+++ b/packages/core/src/generators/arguments.ts
@@ -17,7 +17,7 @@ export const generateArgsForAllItemQuery = (pluralType: string) => ({
* @param pluralType plural name of the content type
*/
export const generateArgsForManyItemQuery = (pluralType: string) => ({
- ids: {
+ references: {
type: '[String]',
},
...skip(),
@@ -26,16 +26,6 @@ export const generateArgsForManyItemQuery = (pluralType: string) => ({
...sortBy(pluralType),
});
-/**
- * Generates the accepted arguments for a 'single-item' query on a content type.
- *
- */
-export const generateArgsForSingleItemQuery = () => ({
- id: {
- type: 'String',
- },
-});
-
/**
* Argument for skipping the first `n` items from the query results.
*/
diff --git a/packages/core/src/generators/collectionMutations.ts b/packages/core/src/generators/collectionMutations.ts
new file mode 100644
index 00000000..e8f967e5
--- /dev/null
+++ b/packages/core/src/generators/collectionMutations.ts
@@ -0,0 +1,141 @@
+import { ObjectTypeComposer, SchemaComposer } from 'graphql-compose';
+import { get, merge } from 'lodash-es';
+import { ReferenceAlreadyExistsError } from '../errors';
+import {
+ CollectionContext,
+ CollectionEntry,
+ EntryNode,
+ LoadedCollectionEntry,
+ LoadedFlatbreadConfig,
+} from '../types';
+
+export interface AddCollectionMutationsArgs {
+ name: string;
+ pluralName: string;
+ config: LoadedFlatbreadConfig;
+ objectComposer: ObjectTypeComposer;
+ schemaComposer: SchemaComposer;
+ collectionEntry: LoadedCollectionEntry;
+ updateCollectionRecord: (
+ collection: CollectionEntry,
+ entry: EntryNode & { _metadata: Partial }
+ ) => Promise;
+}
+
+export default function addCollectionMutations(
+ args: AddCollectionMutationsArgs
+): void {
+ const {
+ name,
+ objectComposer,
+ schemaComposer,
+ updateCollectionRecord,
+ collectionEntry,
+ } = args;
+
+ async function update(
+ payload: Record,
+ existing: EntryNode
+ ) {
+ // remove _metadata to prevent injection
+ const { _metadata, ...update } = payload?.[name];
+
+ // remove supplied key (might not be required)
+ delete update[existing._metadata.referenceField];
+ const newRecord = merge(existing, update);
+
+ await updateCollectionRecord(
+ collectionEntry,
+ newRecord as EntryNode & { _metadata: Partial }
+ );
+
+ return newRecord;
+ }
+
+ async function create(source: unknown, payload: Record) {
+ const existingRecordWithId = await objectComposer
+ .getResolver('findByReferenceField')
+ .resolve({ args: payload[name] });
+
+ if (existingRecordWithId) {
+ throw new ReferenceAlreadyExistsError(
+ payload[name],
+ name,
+ existingRecordWithId._metadata
+ );
+ }
+
+ collectionEntry.creationRequiredFields.forEach((field) => {
+ // const
+
+ if (Object.hasOwn(payload[name], field))
+ throw new Error(
+ `field ${field} is required when creating a new ${name}`
+ );
+ });
+
+ const record = merge(payload[name], {
+ _metadata: {
+ referenceField: collectionEntry.referenceField ?? 'id',
+ collection: name,
+ transformedBy: collectionEntry?.defaultTransformer,
+ sourcedBy: collectionEntry?.defaultSource,
+ } as CollectionContext,
+ });
+
+ return await updateCollectionRecord(collectionEntry, record);
+ }
+
+ schemaComposer.Mutation.addFields({
+ [`update${name}`]: {
+ type: objectComposer,
+ args: {
+ [name]: objectComposer
+ .getInputTypeComposer()
+ .makeFieldNonNull(collectionEntry.creationRequiredFields),
+ },
+ description: `Update a ${name}`,
+ async resolve(source: unknown, payload: Record) {
+ const { _metadata, ...args } = payload?.[name];
+
+ const existingRecord = objectComposer
+ .getResolver('findByReferenceField')
+ .resolve({ args });
+
+ if (!existingRecord)
+ throw new Error(
+ `${name} with ${collectionEntry.referenceField} of ${get(
+ args,
+ collectionEntry.referenceField
+ )} not found`
+ );
+ return update(payload, existingRecord);
+ },
+ update,
+ },
+ [`create${name}`]: {
+ type: objectComposer,
+ args: {
+ [name]: objectComposer
+ .getInputTypeComposer()
+ .makeFieldNonNull(collectionEntry.creationRequiredFields),
+ },
+ description: `Create a ${name}`,
+ resolve: create,
+ },
+ [`upsert${name}`]: {
+ type: objectComposer,
+ args: { [name]: objectComposer.getInputTypeComposer() },
+ async resolve(source: unknown, payload: Record) {
+ const { _metadata, ...args } = payload?.[name];
+
+ const existingRecord = objectComposer
+ .getResolver('findByReferenceField')
+ .resolve({ args });
+
+ if (existingRecord) return update(payload, existingRecord);
+ create(source, payload);
+ },
+ },
+ });
+}
diff --git a/packages/core/src/generators/collectionQueries.ts b/packages/core/src/generators/collectionQueries.ts
new file mode 100644
index 00000000..462c5484
--- /dev/null
+++ b/packages/core/src/generators/collectionQueries.ts
@@ -0,0 +1,109 @@
+import { ObjectTypeComposer, SchemaComposer } from 'graphql-compose';
+import resolveQueryArgs from '../resolvers/arguments';
+
+import { cloneDeep, get } from 'lodash-es';
+import {
+ generateArgsForAllItemQuery,
+ generateArgsForManyItemQuery,
+} from '../generators/arguments';
+import {
+ EntryNode,
+ LoadedCollectionEntry,
+ LoadedFlatbreadConfig,
+ Transformer,
+} from '../types';
+
+export interface AddCollectionQueriesArgs {
+ name: string;
+ pluralName: string;
+ config: LoadedFlatbreadConfig;
+ objectComposer: ObjectTypeComposer;
+ schemaComposer: SchemaComposer;
+ allContentNodesJSON: Record;
+ transformersById: Record;
+ collectionEntry: LoadedCollectionEntry;
+}
+
+export default function addCollectionQueries(args: AddCollectionQueriesArgs) {
+ const {
+ name,
+ pluralName,
+ config,
+ objectComposer,
+ schemaComposer,
+ collectionEntry,
+ allContentNodesJSON,
+ } = args;
+
+ const pluralTypeQueryName = 'all' + pluralName;
+
+ objectComposer.addResolver({
+ name: 'findByReferenceField',
+ type: () => objectComposer,
+ description: `Find one ${name} by its ${collectionEntry.referenceField}`,
+ args: {
+ [collectionEntry.referenceField]: objectComposer
+ .getInputTypeComposer()
+ .getField(collectionEntry.referenceField),
+ },
+ resolve: (rp: Record) =>
+ cloneDeep(
+ allContentNodesJSON[name].find(
+ (node: EntryNode) =>
+ node[collectionEntry.referenceField] ===
+ rp.args[collectionEntry.referenceField]
+ )
+ ),
+ });
+
+ objectComposer.addResolver({
+ name: 'findMany',
+ type: () => [objectComposer],
+ description: `Find many ${pluralName} by their ${collectionEntry.referenceField}`,
+ args: generateArgsForManyItemQuery(pluralName),
+ resolve: (rp: Record) => {
+ const referencesToFind = rp.args.references ?? [];
+ const matches =
+ cloneDeep(allContentNodesJSON[name])?.filter((node: EntryNode) =>
+ referencesToFind?.includes(get(node, collectionEntry.referenceField))
+ ) ?? [];
+ return resolveQueryArgs(matches, rp.args, config, {
+ collectionEntry,
+ type: {
+ name: name,
+ pluralName: pluralName,
+ pluralQueryName: pluralTypeQueryName,
+ },
+ });
+ },
+ });
+
+ objectComposer.addResolver({
+ name: 'all',
+ args: generateArgsForAllItemQuery(pluralName),
+ type: () => [objectComposer],
+ description: `Return a set of ${pluralName}`,
+ resolve: (rp: Record) => {
+ const nodes = cloneDeep(allContentNodesJSON[name]);
+ return resolveQueryArgs(nodes, rp.args, config, {
+ collectionEntry,
+ type: {
+ name: name,
+ pluralName: pluralName,
+ pluralQueryName: pluralTypeQueryName,
+ },
+ });
+ },
+ });
+
+ schemaComposer.Query.addFields({
+ /**
+ * Add find by ID to each content type
+ */
+ [name]: objectComposer.getResolver('findByReferenceField'),
+ /**
+ * Add find 'many' to each content type
+ */
+ [pluralTypeQueryName]: objectComposer.getResolver('all'),
+ });
+}
diff --git a/packages/core/src/generators/generateCollection.ts b/packages/core/src/generators/generateCollection.ts
index d939fba9..afd262bd 100644
--- a/packages/core/src/generators/generateCollection.ts
+++ b/packages/core/src/generators/generateCollection.ts
@@ -19,7 +19,7 @@ export function generateCollection({
return transformKeys(
defaultsDeep(
{},
- getFieldOverrides(collection, config),
+ getFieldOverrides(collection, config.content),
...nodes.map((node) => merge({}, node, preknownSchemaFragments))
),
config.fieldNameTransform
diff --git a/packages/core/src/generators/schema.ts b/packages/core/src/generators/schema.ts
index 72dba8a3..17cb09f4 100644
--- a/packages/core/src/generators/schema.ts
+++ b/packages/core/src/generators/schema.ts
@@ -1,29 +1,24 @@
import { schemaComposer } from 'graphql-compose';
import { composeWithJson } from 'graphql-compose-json';
-import { cloneDeep, merge } from 'lodash-es';
+import { get, merge, set } from 'lodash-es';
import plur from 'plur';
import { VFile } from 'vfile';
import { cacheSchema, checkCacheForSchema } from '../cache/cache';
+
import {
- generateArgsForAllItemQuery,
- generateArgsForManyItemQuery,
- generateArgsForSingleItemQuery,
-} from '../generators/arguments';
-import resolveQueryArgs from '../resolvers/arguments';
-import {
+ CollectionEntry,
ConfigResult,
EntryNode,
+ LoadedCollectionEntry,
LoadedFlatbreadConfig,
Transformer,
} from '../types';
+import { createUniqueId } from '../utils/createUniqueId';
import { map } from '../utils/map';
+import addCollectionMutations from './collectionMutations';
+import addCollectionQueries from './collectionQueries';
import { generateCollection } from './generateCollection';
-interface RootQueries {
- maybeReturnsSingleItem: string[];
- maybeReturnsList: string[];
-}
-
/**
* Generates a GraphQL schema from content nodes.
*
@@ -48,7 +43,53 @@ export async function generateSchema(
config.source.initialize?.(config);
// Invoke the content source resolver to retrieve the content nodes
- const allContentNodes = await config.source.fetch(config.content);
+ const allContentNodes: Record = {};
+
+ const collectionEntriesByName = Object.fromEntries(
+ config.content.map((collection: LoadedCollectionEntry) => [
+ collection.name,
+ collection,
+ ])
+ );
+
+ const addRecord =
+ (sourceId: string) =>
+ (
+ collection: LoadedCollectionEntry,
+ record: EntryNode,
+ context: Ctx
+ ) => {
+ allContentNodes[collection.name] = allContentNodes[collection.name] ?? [];
+
+ const newRecord = {
+ record,
+ context: {
+ sourceContext: context,
+ sourcedBy: sourceId,
+ collection: collection.name,
+ referenceField: collection.referenceField ?? 'id',
+ },
+ };
+
+ allContentNodes[collection.name].push(newRecord);
+ return newRecord;
+ };
+
+ function addCreationRequiredFields(
+ collection: CollectionEntry,
+ fields: string[]
+ ): void {
+ if (!collectionEntriesByName[collection.name])
+ throw new Error(`Couldn't find collection ${collection.name}`);
+ collectionEntriesByName?.[collection.name]?.creationRequiredFields?.push(
+ ...fields
+ );
+ }
+
+ await config.source.fetch(config.content, {
+ addRecord: addRecord(config.source.id as string),
+ addCreationRequiredFields,
+ });
// Transform the content nodes to the expected JSON format if needed
const allContentNodesJSON = optionallyTransformContentNodes(
@@ -80,28 +121,67 @@ export async function generateSchema(
])
);
- /**
- * @todo potentially able to remove this
- **/
- let queries: RootQueries = {
- maybeReturnsSingleItem: [],
- maybeReturnsList: [],
+ const transformersById = {
+ ...Object.fromEntries(
+ config.transformer.map((transformer) => [transformer.id, transformer])
+ ),
+ // this will be the default for collections that aren't already `transformedBy` anything
+ undefined: config.transformer[0],
};
+ async function updateCollectionRecord(
+ collection: CollectionEntry,
+ entry: EntryNode & { _metadata: any }
+ ) {
+ const ctx = entry._metadata;
+ const {
+ serialize,
+ extensions,
+ id: transformerId,
+ } = transformersById[ctx.transformedBy];
+
+ if (ctx.reference) {
+ const index = allContentNodesJSON[ctx.collection].findIndex(
+ (c) => get(c, ctx.referenceField) === ctx.reference
+ );
+
+ if (index < 0) throw new Error('Failed to find record to update');
+ // replace in memory representation of record
+ allContentNodesJSON[ctx.collection][index] = entry;
+ } else {
+ const reference = get(entry, entry._metadata.referenceField);
+ entry._metadata.reference = reference ?? createUniqueId();
+ if (!reference)
+ set(entry, entry._metadata.referenceField, entry._metadata.reference);
+ entry._metadata.transformedBy = transformerId;
+ entry._metadata.extension = extensions?.[0];
+ allContentNodesJSON[ctx.collection].push(entry);
+ }
+
+ const { _metadata, ...record } = entry;
+ const file = await serialize(record, ctx.transformContext);
+ await config?.source.put(file, ctx.sourceContext, {
+ parentContext: ctx,
+ collection,
+ record,
+ });
+
+ return entry;
+ }
+
// Main builder loop - iterate through each content type and generate query resolvers + relationships for it
- for (const [type, schema] of Object.entries(schemaArray)) {
- const pluralType = plur(type, 2);
- const pluralTypeQueryName = 'all' + pluralType;
+ for (const [name, objectComposer] of Object.entries(schemaArray)) {
+ const pluralName = plur(name, 2);
//
/// Global meta fields
//
- schema.addFields({
+ objectComposer.addFields({
_collection: {
type: 'String',
description: 'The collection name',
- resolve: () => type,
+ resolve: () => name,
},
});
@@ -109,82 +189,39 @@ export async function generateSchema(
/// Query resolvers
//
- schema.addResolver({
- name: 'findById',
- type: () => schema,
- description: `Find one ${type} by its ID`,
- args: generateArgsForSingleItemQuery(),
- resolve: (rp: Record) =>
- cloneDeep(allContentNodesJSON[type]).find(
- (node: EntryNode) => node.id === rp.args.id
- ),
- });
-
- schema.addResolver({
- name: 'findMany',
- type: () => [schema],
- description: `Find many ${pluralType} by their IDs`,
- args: generateArgsForManyItemQuery(pluralType),
- resolve: (rp: Record) => {
- const idsToFind = rp.args.ids ?? [];
- const matches =
- cloneDeep(allContentNodesJSON[type])?.filter((node: EntryNode) =>
- idsToFind?.includes(node.id)
- ) ?? [];
- return resolveQueryArgs(matches, rp.args, config, {
- type: {
- name: type,
- pluralName: pluralType,
- pluralQueryName: pluralTypeQueryName,
- },
- });
- },
- });
-
- schema.addResolver({
- name: 'all',
- args: generateArgsForAllItemQuery(pluralType),
- type: () => [schema],
- description: `Return a set of ${pluralType}`,
- resolve: (rp: Record) => {
- const nodes = cloneDeep(allContentNodesJSON[type]);
- return resolveQueryArgs(nodes, rp.args, config, {
- type: {
- name: type,
- pluralName: pluralType,
- pluralQueryName: pluralTypeQueryName,
- },
- });
- },
+ // TODO: add a new type of plugin that can add resolvers to each collection, they should be called here
+
+ addCollectionQueries({
+ name,
+ pluralName,
+ objectComposer,
+ schemaComposer,
+ transformersById,
+ allContentNodesJSON,
+ collectionEntry: collectionEntriesByName[name],
+ config,
});
- schemaComposer.Query.addFields({
- /**
- * Add find by ID to each content type
- */
- [type]: schema.getResolver('findById'),
- /**
- * Add find 'many' to each content type
- */
- [pluralTypeQueryName]: schema.getResolver('all'),
+ addCollectionMutations({
+ name,
+ pluralName,
+ objectComposer,
+ schemaComposer,
+ updateCollectionRecord,
+ config,
+ collectionEntry: collectionEntriesByName[name],
});
-
- /**
- * Separate the queries by return type for later use when wrapping the query resolvers
- * @todo potentially able to remove this
- **/
- queries.maybeReturnsSingleItem.push(type);
- queries.maybeReturnsList.push(pluralTypeQueryName);
}
// Create map of references on each content node
- for (const { collection, refs } of config.content) {
- const typeTC = schemaComposer.getOTC(collection);
+ for (const { name, refs } of config.content) {
+ const typeTC = schemaComposer.getOTC(name);
if (!refs) continue;
Object.entries(refs).forEach(([refField, refType]) => {
const refTypeTC = schemaComposer.getOTC(refType);
+ const refCollectionEntry = collectionEntriesByName[refType];
// If the current content type has this valid reference field as declared in the config, we'll add a resolver for this reference
if (!typeTC.hasField(refField)) return;
@@ -196,20 +233,21 @@ export async function generateSchema(
description: `All ${plur(
String(refType),
2
- )} that are referenced by this ${collection}`,
+ )} that are referenced by this ${name}`,
resolver: () => refTypeTC.getResolver('findMany'),
prepareArgs: {
- ids: (source) => source[refField],
+ references: (source) => source[refField],
},
projection: { [refField]: true },
});
} else {
// If the reference field has a single node
typeTC.addRelation(refField, {
- description: `The ${refType} referenced by this ${collection}`,
- resolver: () => refTypeTC.getResolver('findById'),
+ description: `The ${refType} referenced by this ${name}`,
+ resolver: () => refTypeTC.getResolver('findByReferenceField'),
prepareArgs: {
- id: (source) => source[refField],
+ [refCollectionEntry.referenceField]: (source: EntryNode) =>
+ source[refField],
},
projection: { [refField]: true },
});
@@ -242,11 +280,9 @@ const fetchPreknownSchemaFragments = (
function getTransformerExtensionMap(transformer: Transformer[]) {
const transformerMap = new Map();
- transformer.forEach((t) => {
- t.extensions.forEach((extension) => {
- transformerMap.set(extension, t);
- });
- });
+ transformer.forEach((t) =>
+ t.extensions.forEach((extension) => transformerMap.set(extension, t))
+ );
return transformerMap;
}
@@ -272,14 +308,20 @@ const optionallyTransformContentNodes = (
* @todo if this becomes a performance bottleneck, consider overloading the source plugin API to accept a transform function so we can avoid mapping through the content nodes twice
* */
- return map(allContentNodes, (node: VFile) => {
- const transformer = transformerMap.get(node.extname);
+ return map(allContentNodes, (node: { record: VFile; context: any }) => {
+ const transformer = transformerMap.get(node.record.extname);
if (!transformer?.parse) {
- throw new Error(`no transformer found for ${node.path}`);
+ throw new Error(`no transformer found for ${node.record.path}`);
}
- return transformer.parse(node);
+ const { record: doc, context } = transformer.parse(node.record);
+ doc._metadata = node.context;
+ doc._metadata.transformedBy = transformer.id;
+ doc._metadata.transformContext = context;
+ doc._metadata.reference = get(doc, node.context.referenceField);
+ return doc;
});
}
+ // TODO: might need to map this to attach metadata here
return allContentNodes;
};
diff --git a/packages/core/src/providers/test/base.test.ts b/packages/core/src/providers/test/base.test.ts
index 145c2f45..1f18f1ee 100644
--- a/packages/core/src/providers/test/base.test.ts
+++ b/packages/core/src/providers/test/base.test.ts
@@ -1,11 +1,14 @@
-import test from 'ava';
-import filesystem from '@flatbread/source-filesystem';
import markdownTransformer from '@flatbread/transformer-markdown';
+import test from 'ava';
+import { SourceVirtual } from '../../sources/virtual';
import { FlatbreadProvider } from '../base';
+import { mockData } from './mockData';
+
+const sourceVirtual = new SourceVirtual(mockData);
function basicProject() {
return new FlatbreadProvider({
- source: filesystem(),
+ source: sourceVirtual,
transformer: markdownTransformer({
markdown: {
gfm: true,
@@ -16,7 +19,7 @@ function basicProject() {
content: [
{
path: 'examples/content/markdown/authors',
- collection: 'Author',
+ name: 'Author',
refs: {
friend: 'Author',
},
@@ -48,7 +51,7 @@ test('relational filter query', async (t) => {
const result = await flatbread.query({
source: `
query AllAuthors {
- allAuthors(filter: {friend: {name: {eq: "Eva"}}}) {
+ allAuthors(filter: {friend: {name: {wildcard: "Ev*"}}}) {
name
enjoys
}
@@ -58,3 +61,94 @@ test('relational filter query', async (t) => {
t.snapshot(result);
});
+
+test('update collection record', async (t) => {
+ const flatbread = basicProject();
+ const sitting = (Math.random() * 100) | 0;
+ const result: any = await flatbread.query({
+ variableValues: { author: { id: '2a3e', skills: { sitting } } },
+ source: `
+ mutation UpdateAuthor($author: AuthorInput){
+ updateAuthor(Author: $author) {
+ id
+ skills {
+ sitting
+ }
+ }
+ }
+ `,
+ });
+
+ t.is(result.data.updateAuthor.skills.sitting, sitting);
+
+ const updated: any = await flatbread.query({
+ source: `
+ query {
+ Author(id: "2a3e") {
+ id
+ skills {
+ sitting
+ }
+ }
+ }
+ `,
+ });
+
+ t.is(updated.data.Author.skills.sitting, sitting);
+});
+
+test('create collection record', async (t) => {
+ const flatbread = basicProject();
+ const sitting = 69;
+ const result: any = await flatbread.query({
+ variableValues: { test: { skills: { sitting } } },
+ source: `
+ mutation CreateAuthor($test: AuthorInput){
+ createAuthor(Author: $test) {
+ id
+ skills {
+ sitting
+ }
+ }
+ }
+ `,
+ });
+
+ t.is(result.data.createAuthor.skills.sitting, sitting);
+
+ const updated: any = await flatbread.query({
+ variableValues: { id: result.data.createAuthor.id },
+ source: `
+ query QueryAuthor($id: String) {
+ Author(id: $id) {
+ id
+ skills {
+ sitting
+ }
+ }
+ }
+ `,
+ });
+
+ t.is(updated.data.Author.skills.sitting, sitting);
+});
+
+test('prevents creating record with duplicate reference', async (t) => {
+ const flatbread = basicProject();
+
+ const result = await flatbread.query({
+ variableValues: { test: { id: '2a3e' } },
+ source: `
+ mutation CreateAuthor($test: AuthorInput){
+ createAuthor(Author: $test) {
+ id
+ skills {
+ sitting
+ }
+ }
+ }
+ `,
+ });
+
+ t.snapshot(result.errors);
+});
diff --git a/packages/core/src/providers/test/mockData.ts b/packages/core/src/providers/test/mockData.ts
new file mode 100644
index 00000000..44180443
--- /dev/null
+++ b/packages/core/src/providers/test/mockData.ts
@@ -0,0 +1,75 @@
+import { VFile } from 'vfile';
+
+export const mockData = {
+ Author: [
+ new VFile({
+ path: '/content/authors/eva.md',
+ extname: '.md',
+ value: `---
+ id: 40s3
+ name: Eva
+ entity: Cat
+ enjoys:
+ - sitting
+ - standing
+ - mow mow
+ - sleepy time
+ - attention
+ friend: 2a3e
+ image: eva.svg
+ date_joined: 2002-02-25T16:41:59.558Z
+ skills:
+ sitting: 100000
+ breathing: 4.7
+ liquid_consumption: 10
+ existence: funky
+ sports: -200
+---
+ `,
+ }),
+ new VFile({
+ path: '/content/authors/tony.md',
+ extname: '.md',
+ value: `---
+ name: Tony
+ id: 2a3e
+ friend: ab2c
+ enjoys:
+ - cats
+ - tea
+ - making this
+ date_joined: 2021-02-25T16:41:59.558Z
+ skills:
+ sitting: 71
+ breathing: 7.07
+ liquid_consumption: 100
+ existence: simulation
+ sports: -2
+ cat_pat: 1500
+---
+ `,
+ }),
+ new VFile({
+ path: '/content/authors/daes.md',
+ extname: '.md',
+ value: `---
+ id: ab2c
+ name: Daes
+ entity: Human
+ enjoys:
+ - cats
+ - coffee
+ - design
+ friend: 40s3
+ date_joined: 2021-04-22T16:41:59.558Z
+ skills:
+ sitting: 304
+ breathing: 1.034234
+ liquid_consumption: -100
+ existence: etheral
+ sports: 3
+---
+ `,
+ }),
+ ],
+};
diff --git a/packages/core/src/providers/test/snapshots/base.test.ts.md b/packages/core/src/providers/test/snapshots/base.test.ts.md
index 8229fe4a..509391d8 100644
--- a/packages/core/src/providers/test/snapshots/base.test.ts.md
+++ b/packages/core/src/providers/test/snapshots/base.test.ts.md
@@ -11,14 +11,6 @@ Generated by [AVA](https://avajs.dev).
{
data: {
allAuthors: [
- {
- enjoys: [
- 'cats',
- 'coffee',
- 'design',
- ],
- name: 'Daes',
- },
{
enjoys: [
'sitting',
@@ -39,20 +31,11 @@ Generated by [AVA](https://avajs.dev).
},
{
enjoys: [
- 'peeing in the cat tower',
- 'being shaped like an egg',
- 'violence',
- ],
- name: 'Ushi',
- },
- {
- enjoys: [
- 'talking',
- 'encroaching upon personal space',
- 'being concerned',
- 'smooth jazz',
+ 'cats',
+ 'coffee',
+ 'design',
],
- name: 'Yoshi',
+ name: 'Daes',
},
],
},
@@ -73,14 +56,30 @@ Generated by [AVA](https://avajs.dev).
],
name: 'Daes',
},
+ ],
+ },
+ }
+
+## prevents creating record with duplicate reference
+
+> Snapshot 1
+
+ [
+ GraphQLError {
+ extensions: {},
+ locations: [
{
- enjoys: [
- 'cats',
- 'tea',
- 'making this',
- ],
- name: 'Tony',
+ column: 9,
+ line: 3,
},
],
+ message: `Failed to create␊
+ {␊
+ "id": "2a3e"␊
+ }␊
+ Author with id of 2a3e already exists`,
+ path: [
+ 'createAuthor',
+ ],
},
- }
+ ]
diff --git a/packages/core/src/providers/test/snapshots/base.test.ts.snap b/packages/core/src/providers/test/snapshots/base.test.ts.snap
index 7b8489be..fd6c482f 100644
Binary files a/packages/core/src/providers/test/snapshots/base.test.ts.snap and b/packages/core/src/providers/test/snapshots/base.test.ts.snap differ
diff --git a/packages/core/src/resolvers/arguments.ts b/packages/core/src/resolvers/arguments.ts
index 9c727db0..c306009f 100644
--- a/packages/core/src/resolvers/arguments.ts
+++ b/packages/core/src/resolvers/arguments.ts
@@ -1,11 +1,12 @@
-import { keyBy } from 'lodash-es';
+import { get, keyBy } from 'lodash-es';
import sift, {
generateFilterSetManifest,
TargetAndComparator,
} from '../utils/sift';
-import { ContentNode, FlatbreadConfig } from '../types';
+import { ContentNode, FlatbreadConfig, LoadedCollectionEntry } from '../types';
import { FlatbreadProvider } from '../providers/base';
interface ResolveQueryArgsOptions {
+ collectionEntry: LoadedCollectionEntry;
type: {
name: string;
pluralName: string;
@@ -27,14 +28,20 @@ const resolveQueryArgs = async (
if (filter) {
// Place the nodes into a keyed object by ID so we can easily filter by ID without doing tons of looping.
// TODO: store all nodes in an ID-keyed object.
- // TODO: replace id field with user-defined/fallback identifier field.
- const nodeById = keyBy(nodes, 'id');
+ const nodeByReference = keyBy(
+ nodes,
+ options.collectionEntry.referenceField
+ );
// Turn the filter into a GraphQL subquery that returns an array of matching content node IDs.
- const listOfNodeIDsToFilter = await resolveFilter(filter, config, options);
+ const listOfRecordReferencesToFilter = await resolveFilter(
+ filter,
+ config,
+ options
+ );
- nodes = listOfNodeIDsToFilter.map(
- (desiredNodeId) => nodeById[desiredNodeId]
+ nodes = listOfRecordReferencesToFilter.map(
+ (desiredNodeReference) => nodeByReference[desiredNodeReference]
);
}
@@ -83,7 +90,7 @@ const resolveQueryArgs = async (
*
*/
function buildFilterQueryFragment(filterSetManifest: TargetAndComparator) {
- let filterToQuery = [];
+ const filterToQuery = [];
for (const filter of filterSetManifest) {
let graphQLFieldAccessor = '';
@@ -134,11 +141,10 @@ export const resolveFilter = async (
// Build a GraphQL query fragment that will be used to resolve content nodes in a structure expected by the sift function, for the given filter.
const filterQueryFragment = buildFilterQueryFragment(filterSetManifest);
- // TODO: replace id field with user-defined/fallback identifier field
const queryString = `
query ${options.type.pluralQueryName}_FilterSubquery {
${options.type.pluralQueryName} {
- id
+ ${options.collectionEntry.referenceField}
${filterQueryFragment}
}
}
@@ -150,7 +156,12 @@ export const resolveFilter = async (
const result = data?.[options.type.pluralQueryName] as ContentNode[];
- return result.filter(sift(filter)).map((node) => node.id);
+ return result
+ .filter(sift(filter))
+ .map(
+ (node) =>
+ get(node, options.collectionEntry.referenceField) as string | number
+ );
};
/**
diff --git a/packages/core/src/sources/virtual.ts b/packages/core/src/sources/virtual.ts
new file mode 100644
index 00000000..d2be745a
--- /dev/null
+++ b/packages/core/src/sources/virtual.ts
@@ -0,0 +1,55 @@
+import { cloneDeep } from 'lodash-es';
+import { VFile } from 'vfile';
+import {
+ FlatbreadArgs,
+ LoadedCollectionEntry,
+ LoadedFlatbreadConfig,
+ Source,
+} from '../types';
+
+interface MemContext {
+ id: string;
+ collectionName: string;
+}
+
+export class SourceVirtual implements Source {
+ private data: Record = {};
+
+ public id = '@flatbread/sourceMemory';
+
+ constructor(data: Record) {
+ this.data = data;
+ }
+
+ initialize(config: LoadedFlatbreadConfig) {}
+
+ async fetch(
+ entries: LoadedCollectionEntry[],
+ { addRecord }: FlatbreadArgs
+ ) {
+ for (const entry of entries) {
+ if (!this.data[entry.name])
+ throw new Error(`can't find collection ${entry.name}`);
+ for (const record of this.data[entry.name]) {
+ addRecord(entry, cloneDeep(record), {
+ id: record.path,
+ collectionName: entry.name,
+ });
+ }
+ }
+ }
+
+ async put(doc: VFile, context: MemContext, { parentContext }: any) {
+ const record = this.data[parentContext.collection].find(
+ (entry) => entry.path === parentContext.reference
+ );
+
+ if (record) {
+ record.value = doc.value;
+ } else {
+ this.data[parentContext.collection].push(doc);
+ }
+
+ return { doc, context };
+ }
+}
diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts
index 39237b2e..428d9fd2 100644
--- a/packages/core/src/types.ts
+++ b/packages/core/src/types.ts
@@ -21,16 +21,16 @@ export type ContentNode = BaseContentNode & {
* @todo This needs to be typed more strictly.
*/
export interface FlatbreadConfig {
- source: Source;
+ source: Source;
transformer?: Transformer | Transformer[];
- content: Content;
+ content: CollectionEntry[];
fieldNameTransform?: (field: string) => string;
}
export interface LoadedFlatbreadConfig {
- source: Source;
+ source: Source;
transformer: Transformer[];
- content: Content;
+ content: LoadedCollectionEntry[];
fieldNameTransform: (field: string) => string;
loaded: {
extensions: string[];
@@ -53,8 +53,10 @@ export interface Transformer {
* @param input Node to transform
*/
parse?: (input: VFile) => EntryNode;
+ id?: string;
preknownSchemaFragments?: () => Record;
inspect: (input: EntryNode) => string;
+ serialize: (input: EntryNode, ctx: CollectionContext) => VFile;
extensions: string[];
}
@@ -69,15 +71,36 @@ export type EntryNode = Record;
* The result of an invoked `Source` plugin which contains methods on how to retrieve content nodes in
* their raw (if coupled with a `Transformer` plugin) or processed form.
*/
-export interface Source {
+
+export interface FlatbreadArgs {
+ addRecord(
+ collection: LoadedCollectionEntry,
+ record: EntryNode,
+ context?: Context
+ ): void;
+ addCreationRequiredFields(
+ collection: LoadedCollectionEntry,
+ fields: string[]
+ ): void;
+}
+
+export interface Source {
initialize?: (flatbreadConfig: LoadedFlatbreadConfig) => void;
- fetchByType?: (path: string) => Promise;
+ id?: string;
+ put: (
+ source: VFile,
+ ctx: Context,
+ opts: { parentContext: any; collection: CollectionEntry; record: any }
+ ) => Promise<{ doc: VFile; context: Context }>;
fetch: (
- allContentTypes: Record[]
- ) => Promise>;
+ allContentTypes: LoadedCollectionEntry[],
+ flatbread: FlatbreadArgs
+ ) => Promise;
}
-export type SourcePlugin = (sourceConfig?: Record) => Source;
+export type SourcePlugin = (
+ sourceConfig?: Record
+) => Source;
/**
* An override can be used to declare a custom resolve for a field in content
@@ -94,13 +117,35 @@ export interface Override {
) => any;
}
+export interface CollectionContext {
+ referenceField: string;
+ collection: string;
+ filename: string;
+ path: string;
+ slug: string;
+ sourcedBy: string;
+ transformedBy: string;
+ reference: string;
+}
+
/**
- * An array of content descriptions which can be used to retrieve content nodes.
+ * A collection entry which can be used to retrieve content nodes.
*
* This is paired with a `Source` (and, *optionally*, a `Transformer`) plugin.
*/
-export type Content = {
- collection: string;
+
+export interface CollectionEntry {
+ name: string;
+ path: string;
overrides?: Override[];
- [key: string]: any;
-}[];
+ refs?: Record;
+ referenceField?: string;
+ creationRequiredFields?: string[];
+ defaultTransformer?: string;
+ defaultSource?: string;
+}
+
+export interface LoadedCollectionEntry extends CollectionEntry {
+ referenceField: string;
+ creationRequiredFields: string[];
+}
diff --git a/packages/core/src/utils/createShaHash.ts b/packages/core/src/utils/createShaHash.ts
new file mode 100644
index 00000000..a194072a
--- /dev/null
+++ b/packages/core/src/utils/createShaHash.ts
@@ -0,0 +1,6 @@
+import { anyToString } from './stringUtils';
+import { createHash } from 'crypto';
+
+export default function createShaHash(content: any) {
+ return createHash('sha256').update(anyToString(content)).digest('hex');
+}
diff --git a/packages/core/src/utils/createUniqueId.ts b/packages/core/src/utils/createUniqueId.ts
new file mode 100644
index 00000000..c860ed01
--- /dev/null
+++ b/packages/core/src/utils/createUniqueId.ts
@@ -0,0 +1,8 @@
+import { customAlphabet } from 'nanoid';
+
+// only use lower case letters and numbers to avoid issues with windows ignoring case on filenames
+const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz0123456789');
+
+export function createUniqueId() {
+ return nanoid();
+}
diff --git a/packages/core/src/utils/fieldOverrides.ts b/packages/core/src/utils/fieldOverrides.ts
index f3106bb9..5122ea14 100644
--- a/packages/core/src/utils/fieldOverrides.ts
+++ b/packages/core/src/utils/fieldOverrides.ts
@@ -1,20 +1,24 @@
-import { FlatbreadConfig, Override } from '../types';
import { get, set } from 'lodash-es';
+import { CollectionEntry } from '../../dist';
+import { Override } from '../types';
/**
* Get an object containing functions nested in an object structure
* aligning to the listed overrides in the config
*
- * @param collection the collection string referenced in the config
- * @param config the flatbread config object
+ * @param collectionName the collection string referenced in the config
+ * @param entries the flatbread config object
* @returns an object in the shape of the json schema
*/
-export function getFieldOverrides(collection: string, config: FlatbreadConfig) {
- const content = config.content.find(
- (content) => content.collection === collection
+export function getFieldOverrides(
+ collectionName: string,
+ entries: CollectionEntry[]
+) {
+ const collectionEntry = entries.find(
+ (entry) => entry.name === collectionName
);
- if (!content?.overrides) return {};
- const overrides = content.overrides;
+ if (!collectionEntry?.overrides) return {};
+ const overrides = collectionEntry.overrides;
return overrides.reduce((fields: any, override: Override) => {
const { field, type, ...rest } = override;
diff --git a/packages/core/src/utils/initializeConfig.ts b/packages/core/src/utils/initializeConfig.ts
index 8513e310..bb8e2dcf 100644
--- a/packages/core/src/utils/initializeConfig.ts
+++ b/packages/core/src/utils/initializeConfig.ts
@@ -1,6 +1,9 @@
-import { cloneDeep } from 'lodash-es';
+import { cloneDeep, defaultsDeep } from 'lodash-es';
+import { LoadedCollectionEntry } from '../types';
import { FlatbreadConfig, LoadedFlatbreadConfig, Transformer } from '../types';
import { toArray } from './arrayUtils';
+import createShaHash from './createShaHash';
+import { anyToString } from './stringUtils';
import camelCase from './camelCase';
/**
@@ -10,11 +13,22 @@ export function initializeConfig(
rawConfig: FlatbreadConfig
): LoadedFlatbreadConfig {
const config = cloneDeep(rawConfig);
- const transformer = toArray(config.transformer ?? []);
+ const transformer = toArray(config.transformer ?? []).map((t) => {
+ t.id = t.id ?? createShaHash(t);
+ return t;
+ });
+
+ config.source.id = config.source.id ?? createShaHash(config.source);
return {
fieldNameTransform: camelCase,
...config,
+ content: config.content?.map((content: Partial) =>
+ defaultsDeep(content, {
+ referenceField: 'id',
+ creationRequiredFields: [],
+ })
+ ),
transformer,
loaded: {
extensions: transformer
diff --git a/packages/core/src/utils/tests/fieldOverrides.test.ts b/packages/core/src/utils/tests/fieldOverrides.test.ts
index fca200ee..bb93fdf7 100644
--- a/packages/core/src/utils/tests/fieldOverrides.test.ts
+++ b/packages/core/src/utils/tests/fieldOverrides.test.ts
@@ -1,8 +1,9 @@
import test from 'ava';
+import { LoadedCollectionEntry } from '../../types';
import { getFieldOverrides } from '../fieldOverrides.js';
-function getProps(overrides: any[]): [string, any] {
- return ['t', { content: [{ collection: 't', overrides }] }];
+function getProps(overrides: any[]): [string, LoadedCollectionEntry[]] {
+ return ['t', [{ name: 't', overrides, referenceField: 'id' }]];
}
test('basic override', (t) => {
diff --git a/packages/flatbread/README.md b/packages/flatbread/README.md
index 4a46699a..9627f1a2 100644
--- a/packages/flatbread/README.md
+++ b/packages/flatbread/README.md
@@ -83,6 +83,10 @@ export default defineConfig({
{
path: 'content/authors',
collection: 'Author',
+ // the field to use as the primary key, 'id' by default
+ referenceField: 'id',
+ // a list of fields that are required when creating a new record (mostly used by plugins)
+ creationRequiredFields: []
refs: {
friend: 'Author',
},
@@ -253,6 +257,42 @@ Limits the number of returned entries to the specified amount. Accepts an intege
[Check out the example integrations](https://github.com/FlatbreadLabs/flatbread/tree/main/playground) of using Flatbread with frameworks like SvelteKit and Next.js.
+## Create and update records (mutations)
+
+Create a new record
+
+```graphql
+
+mutate ($example: PostInput) {
+ createPost(Post: $example) {
+ id
+ title
+ }
+}
+```
+
+Update an existing record
+
+```graphql
+mutate ($example: PostInput) {
+ updatePost(Post: $example) {
+ id
+ title
+ }
+}
+```
+
+Upsert a record (will update if reference exists, or create a new one)
+
+```graphql
+mutate ($example: PostInput) {
+ upsertPost(Post: $example) {
+ id
+ title
+ }
+}
+```
+
## Field overrides
Field overrides allow you to define custom GraphQL types or resolvers on top of fields in your content. For example, you could [optimize images](https://github.com/FlatbreadLabs/flatbread/tree/main/packages/resolver-svimg/), encapsulate an endpoint, and more!
diff --git a/packages/flatbread/content/authors/me.md b/packages/flatbread/content/authors/me.md
index 408dc67d..200f7328 100644
--- a/packages/flatbread/content/authors/me.md
+++ b/packages/flatbread/content/authors/me.md
@@ -8,7 +8,7 @@ enjoys:
- making this
date_joined: 2021-02-25T16:41:59.558Z
skills:
- sitting: 204
+ sitting: 71
breathing: 7.07
liquid_consumption: 100
existence: simulation
diff --git a/packages/source-filesystem/package.json b/packages/source-filesystem/package.json
index d8e42ded..b124a572 100644
--- a/packages/source-filesystem/package.json
+++ b/packages/source-filesystem/package.json
@@ -38,6 +38,7 @@
},
"devDependencies": {
"@flatbread/core": "workspace:*",
+ "@sindresorhus/slugify": "^2.1.0",
"@types/lodash-es": "4.17.6",
"@types/node": "16.11.47",
"tsup": "6.2.1",
diff --git a/packages/source-filesystem/src/index.test.ts b/packages/source-filesystem/src/index.test.ts
new file mode 100644
index 00000000..6af017b8
--- /dev/null
+++ b/packages/source-filesystem/src/index.test.ts
@@ -0,0 +1,40 @@
+import test from 'ava';
+import { createPath } from './index';
+
+test('create path can correctly populate a path', (t) => {
+ const path = createPath(
+ {
+ name: 'Test',
+ path: '/[test]/[nested.test]/[blah].md',
+ },
+ {
+ test: 'first',
+ nested: {
+ test: 'second-part',
+ },
+ blah: 'blarghhh',
+ },
+ { extension: '' }
+ );
+
+ t.is(path, '/first/second-part/blarghhh.md');
+});
+
+test('create path can correctly populate a path without an extension', (t) => {
+ const path = createPath(
+ {
+ name: 'Test',
+ path: '/[test]/[nested.test]/[blah]',
+ },
+ {
+ test: 'first',
+ nested: {
+ test: 'second-part',
+ },
+ blah: 'blarghhh',
+ },
+ { extension: '.md', reference: 'test-name', referenceField: 'id' }
+ );
+
+ t.is(path, '/first/second-part/blarghhh/test-name.md');
+});
diff --git a/packages/source-filesystem/src/index.ts b/packages/source-filesystem/src/index.ts
index 461cf0d2..b4787607 100644
--- a/packages/source-filesystem/src/index.ts
+++ b/packages/source-filesystem/src/index.ts
@@ -1,7 +1,13 @@
-import { defaultsDeep } from 'lodash-es';
-import { read } from 'to-vfile';
-
-import type { LoadedFlatbreadConfig, SourcePlugin } from '@flatbread/core';
+import type {
+ CollectionEntry,
+ FlatbreadArgs,
+ LoadedCollectionEntry,
+ LoadedFlatbreadConfig,
+} from '@flatbread/core';
+import slugify from '@sindresorhus/slugify';
+import { defaultsDeep, get } from 'lodash-es';
+import path, { relative, resolve } from 'path';
+import { read, write } from 'to-vfile';
import type { VFile } from 'vfile';
import type {
FileNode,
@@ -10,6 +16,12 @@ import type {
} from './types';
import gatherFileNodes from './utils/gatherFileNodes';
+interface Context {
+ filename?: string;
+ path: string;
+ slug: string;
+}
+
/**
* Get nodes (files) from the directory
*
@@ -18,17 +30,30 @@ import gatherFileNodes from './utils/gatherFileNodes';
* @returns An array of content nodes
*/
async function getNodesFromDirectory(
- path: string,
+ collectionEntry: LoadedCollectionEntry,
+ { addRecord, addCreationRequiredFields }: FlatbreadArgs,
config: InitializedSourceFilesystemConfig
-): Promise {
+): Promise {
const { extensions } = config;
- const nodes: FileNode[] = await gatherFileNodes(path, { extensions });
+ const nodes: FileNode[] = await gatherFileNodes(collectionEntry.path, {
+ extensions,
+ });
- return Promise.all(
- nodes.map(async (node: FileNode): Promise => {
- const file = await read(node.path);
- file.data = node.data;
- return file;
+ // collect all the variable path segments [like] [these]
+ const requiredFields = Array.from(
+ collectionEntry.path.matchAll(/\[(.*?)\]/g)
+ ).map((m) => m[1]);
+ addCreationRequiredFields(collectionEntry, requiredFields);
+
+ await Promise.all(
+ nodes.map(async (node: FileNode): Promise => {
+ const doc = await read(node.path);
+ doc.data = node.data;
+ addRecord(collectionEntry, doc, {
+ filename: doc.basename,
+ path: relative(process.cwd(), doc.path),
+ slug: slugify(doc.stem ?? ''),
+ });
})
);
}
@@ -40,26 +65,57 @@ async function getNodesFromDirectory(
* @returns
*/
async function getAllNodes(
- allContentTypes: Record[],
+ allCollectionEntries: LoadedCollectionEntry[],
+ flatbread: FlatbreadArgs,
config: InitializedSourceFilesystemConfig
-): Promise> {
- const nodeEntries = await Promise.all(
- allContentTypes.map(
+): Promise {
+ await Promise.all(
+ allCollectionEntries.map(
async (contentType): Promise> =>
new Promise(async (res) =>
res([
- contentType.collection,
- await getNodesFromDirectory(contentType.path, config),
+ contentType.name,
+ await getNodesFromDirectory(contentType, flatbread, config),
])
)
)
);
+}
- const nodes = Object.fromEntries(
- nodeEntries as Iterable
+export function createPath(
+ collection: CollectionEntry,
+ record: any,
+ parentContext: any
+): string {
+ const partialPath = collection.path.replace(
+ /\[(.*?)\]/g,
+ (_: any, match: any) => get(record, match)
);
- return nodes;
+ const filename = path.parse(partialPath);
+
+ if (!filename.ext) {
+ return resolve(
+ partialPath,
+ parentContext.reference + parentContext.extension
+ );
+ }
+
+ return partialPath;
+}
+
+async function put(
+ doc: VFile,
+ context: Context,
+ { parentContext, collection, record }: any
+) {
+ const path = context?.path ?? createPath(collection, record, parentContext);
+ doc.basename = context?.filename ?? parentContext.reference;
+ doc.path = resolve(process.cwd(), path);
+
+ await write(doc);
+
+ return { doc, context };
}
/**
@@ -68,7 +124,8 @@ async function getAllNodes(
* @param sourceConfig content types config
* @returns A function that returns functions which fetch lists of nodes
*/
-export const source: SourcePlugin = (sourceConfig?: sourceFilesystemConfig) => {
+
+export function source(sourceConfig?: sourceFilesystemConfig) {
let config: InitializedSourceFilesystemConfig;
return {
@@ -76,10 +133,12 @@ export const source: SourcePlugin = (sourceConfig?: sourceFilesystemConfig) => {
const { extensions } = flatbreadConfig.loaded;
config = defaultsDeep(sourceConfig ?? {}, { extensions });
},
- fetchByType: (path: string) => getNodesFromDirectory(path, config),
- fetch: (allContentTypes: Record[]) =>
- getAllNodes(allContentTypes, config),
+ fetch: (
+ content: LoadedCollectionEntry[],
+ flatbread: FlatbreadArgs
+ ) => getAllNodes(content, flatbread, config),
+ put,
};
-};
+}
export default source;
diff --git a/packages/source-filesystem/src/utils/gatherFileNodes.ts b/packages/source-filesystem/src/utils/gatherFileNodes.ts
index 8f880636..a4bdf664 100644
--- a/packages/source-filesystem/src/utils/gatherFileNodes.ts
+++ b/packages/source-filesystem/src/utils/gatherFileNodes.ts
@@ -41,6 +41,11 @@ export default async function gatherFileNodes(
path: string,
{ readDirectory = readDir, extensions }: GatherFileNodesOptions = {}
): Promise {
+ if (path.includes('*'))
+ throw new Error(
+ `* wildcards are not supported, only variable named paths like [example]\nPlease change path in config "${path}"`
+ );
+
/**
* Prepend a period to the extension if it doesn't have one.
* If no extensions are provided, use the default ones.
@@ -51,7 +56,7 @@ export default async function gatherFileNodes(
) ?? ['.md', '.mdx', '.markdown'];
// gather all the globs in the path ( [capture-groups], **, *)
- const [pathPrefix, ...globs] = path.split(/\/(?:\[|\*+)/);
+ const [pathPrefix, ...globs] = path.split(/\/(?:\[)/);
// for each segment - gather names for capture groups
// and calculate what to remove from matches ex: [name].md => remove .md from match
diff --git a/packages/source-filesystem/src/utils/tests/gatherFileNodes.test.ts b/packages/source-filesystem/src/utils/tests/gatherFileNodes.test.ts
index e9b2699c..67bc3134 100644
--- a/packages/source-filesystem/src/utils/tests/gatherFileNodes.test.ts
+++ b/packages/source-filesystem/src/utils/tests/gatherFileNodes.test.ts
@@ -32,20 +32,20 @@ test('basic case', async (t) => {
t.snapshot(result2);
});
-test('double level recursion', async (t) => {
- const result = await gatherFileNodes('deeply/**/*.md', opts);
- t.snapshot(result);
-});
+// test('double level recursion', async (t) => {
+// const result = await gatherFileNodes('deeply/**/*.md', opts);
+// t.snapshot(result);
+// });
test('double level recursion named', async (t) => {
const result = await gatherFileNodes('deeply/[a]/[b].md', opts);
t.snapshot(result);
});
-test('single level recursion', async (t) => {
- const result = await gatherFileNodes('./*.md', opts as any);
- t.snapshot(result);
-});
+// test('single level recursion', async (t) => {
+// const result = await gatherFileNodes('./*.md', opts as any);
+// t.snapshot(result);
+// });
test('double level recursion named without parent directory', async (t) => {
const result = await gatherFileNodes('./[genre]/[title].md', opts);
@@ -57,15 +57,15 @@ test('single level named', async (t) => {
t.snapshot(result);
});
-test('double level first named', async (t) => {
- const result = await gatherFileNodes('./[genre]/*.md', opts);
- t.snapshot(result);
-});
+// test('double level first named', async (t) => {
+// const result = await gatherFileNodes('./[genre]/*.md', opts);
+// t.snapshot(result);
+// });
-test('double level second named', async (t) => {
- const result = await gatherFileNodes('./**/[title].md', opts);
- t.snapshot(result);
-});
+// test('double level second named', async (t) => {
+// const result = await gatherFileNodes('./**/[title].md', opts);
+// t.snapshot(result);
+// });
test('triple level', async (t) => {
const result = await gatherFileNodes('./[random]/[name]/[title].md', opts);
diff --git a/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.md b/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.md
index ea374d37..766581fa 100644
--- a/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.md
+++ b/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.md
@@ -41,19 +41,6 @@ Generated by [AVA](https://avajs.dev).
},
]
-## double level recursion
-
-> Snapshot 1
-
- [
- {
- data: {},
- isDirectory: Function isDirectory {},
- name: 'file.md',
- path: 'deeply/nested/file.md',
- },
- ]
-
## double level recursion named
> Snapshot 1
@@ -70,19 +57,6 @@ Generated by [AVA](https://avajs.dev).
},
]
-## single level recursion
-
-> Snapshot 1
-
- [
- {
- data: {},
- isDirectory: Function isDirectory {},
- name: 'random file.md',
- path: '/random file.md',
- },
- ]
-
## double level recursion named without parent directory
> Snapshot 1
@@ -141,84 +115,6 @@ Generated by [AVA](https://avajs.dev).
},
]
-## double level first named
-
-> Snapshot 1
-
- [
- {
- data: {
- genre: 'Comedy',
- },
- isDirectory: Function isDirectory {},
- name: 'Nine Lives of Tomas Katz, The.md',
- path: 'Comedy/Nine Lives of Tomas Katz, The.md',
- },
- {
- data: {
- genre: 'Comedy',
- },
- isDirectory: Function isDirectory {},
- name: 'Road to Wellville, The.md',
- path: 'Comedy/Road to Wellville, The.md',
- },
- {
- data: {
- genre: 'Drama',
- },
- isDirectory: Function isDirectory {},
- name: 'Life for Sale (Life for Sale (Kotirauha).md',
- path: 'Drama/Life for Sale (Life for Sale (Kotirauha).md',
- },
- {
- data: {
- genre: 'Documentary',
- },
- isDirectory: Function isDirectory {},
- name: 'TerrorStorm: A History of Government-Sponsored Terrorism.md',
- path: 'Documentary/TerrorStorm: A History of Government-Sponsored Terrorism.md',
- },
- ]
-
-## double level second named
-
-> Snapshot 1
-
- [
- {
- data: {
- title: 'Nine Lives of Tomas Katz, The',
- },
- isDirectory: Function isDirectory {},
- name: 'Nine Lives of Tomas Katz, The.md',
- path: 'Comedy/Nine Lives of Tomas Katz, The.md',
- },
- {
- data: {
- title: 'Road to Wellville, The',
- },
- isDirectory: Function isDirectory {},
- name: 'Road to Wellville, The.md',
- path: 'Comedy/Road to Wellville, The.md',
- },
- {
- data: {
- title: 'Life for Sale (Life for Sale (Kotirauha)',
- },
- isDirectory: Function isDirectory {},
- name: 'Life for Sale (Life for Sale (Kotirauha).md',
- path: 'Drama/Life for Sale (Life for Sale (Kotirauha).md',
- },
- {
- data: {
- title: 'TerrorStorm: A History of Government-Sponsored Terrorism',
- },
- isDirectory: Function isDirectory {},
- name: 'TerrorStorm: A History of Government-Sponsored Terrorism.md',
- path: 'Documentary/TerrorStorm: A History of Government-Sponsored Terrorism.md',
- },
- ]
-
## triple level
> Snapshot 1
diff --git a/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.snap b/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.snap
index 8437dd53..343025d6 100644
Binary files a/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.snap and b/packages/source-filesystem/src/utils/tests/snapshots/gatherFileNodes.test.ts.snap differ
diff --git a/packages/source-filesystem/tsup.config.ts b/packages/source-filesystem/tsup.config.ts
index 24b848e6..fa78cb57 100644
--- a/packages/source-filesystem/tsup.config.ts
+++ b/packages/source-filesystem/tsup.config.ts
@@ -4,7 +4,7 @@ export const tsup: Options = {
splitting: false,
sourcemap: true,
clean: true,
- entryPoints: ['src/*'],
+ entryPoints: ['src/index.ts'],
format: ['esm'],
target: 'esnext',
dts: true,
diff --git a/packages/transformer-markdown/package.json b/packages/transformer-markdown/package.json
index c97d3800..3f6888ba 100644
--- a/packages/transformer-markdown/package.json
+++ b/packages/transformer-markdown/package.json
@@ -32,7 +32,6 @@
"node": "^14.13.1 || >=16.0.0"
},
"dependencies": {
- "@sindresorhus/slugify": "^2.1.0",
"graphql": "16.5.0",
"gray-matter": "^4.0.3",
"lodash-es": "^4.17.21",
diff --git a/packages/transformer-markdown/src/index.ts b/packages/transformer-markdown/src/index.ts
index 54d5a01a..eb8dd087 100644
--- a/packages/transformer-markdown/src/index.ts
+++ b/packages/transformer-markdown/src/index.ts
@@ -1,10 +1,14 @@
import matter from 'gray-matter';
-import slugify from '@sindresorhus/slugify';
-import { html, excerpt, timeToRead } from './graphql/schema-helpers';
+import { excerpt, html, timeToRead } from './graphql/schema-helpers';
+import ownPackage from '../package.json' assert { type: 'json' };
+import type {
+ CollectionContext,
+ EntryNode,
+ TransformerPlugin,
+} from '@flatbread/core';
+import { VFile } from 'vfile';
import type { MarkdownTransformerConfig } from './types';
-import type { EntryNode, TransformerPlugin } from '@flatbread/core';
-import type { VFile } from 'vfile';
export * from './types';
@@ -20,17 +24,27 @@ export const parse = (
): EntryNode => {
const { data, content } = matter(String(input), config.grayMatter);
return {
- _filename: input.basename,
- _path: input.path,
- _slug: slugify(input.stem ?? ''),
- ...input.data,
- ...data,
- _content: {
- raw: content,
+ record: {
+ ...input.data,
+ ...data,
+ _content: {
+ raw: content,
+ },
},
};
};
+function serialize(
+ data: EntryNode,
+ ctx: CollectionContext,
+ config: MarkdownTransformerConfig
+) {
+ const { _content, ...rest } = data;
+ const doc = matter.stringify(_content?.raw ?? '', rest, config.grayMatter);
+
+ return new VFile(doc);
+}
+
/**
* Converts markdown files to meaningful data.
*
@@ -45,6 +59,7 @@ export const transformer: TransformerPlugin = (
);
return {
parse: (input: VFile): EntryNode => parse(input, config),
+ id: ownPackage.name,
preknownSchemaFragments: () => ({
_content: {
html: html(config),
@@ -53,6 +68,8 @@ export const transformer: TransformerPlugin = (
},
}),
inspect: (input: EntryNode) => String(input),
+ serialize: (input: EntryNode, ctx: CollectionContext) =>
+ serialize(input, ctx, config),
extensions,
};
};
diff --git a/packages/transformer-yaml/package.json b/packages/transformer-yaml/package.json
index 761722e4..0857c7de 100644
--- a/packages/transformer-yaml/package.json
+++ b/packages/transformer-yaml/package.json
@@ -32,7 +32,6 @@
"node": "^14.13.1 || >=16.0.0"
},
"dependencies": {
- "@sindresorhus/slugify": "^2.1.0",
"js-yaml": "^4.1.0"
},
"devDependencies": {
diff --git a/packages/transformer-yaml/src/index.ts b/packages/transformer-yaml/src/index.ts
index f5b36ed3..b16cf861 100644
--- a/packages/transformer-yaml/src/index.ts
+++ b/packages/transformer-yaml/src/index.ts
@@ -1,8 +1,12 @@
-import yaml from 'js-yaml';
+import type {
+ CollectionContext,
+ EntryNode,
+ TransformerPlugin,
+} from '@flatbread/core';
import type { YAMLException } from 'js-yaml';
-import slugify from '@sindresorhus/slugify';
-import type { EntryNode, TransformerPlugin } from '@flatbread/core';
-import type { VFile } from 'vfile';
+import yaml from 'js-yaml';
+import { VFile } from 'vfile';
+import ownPackage from '../package.json' assert { type: 'json' };
/**
* Transforms a yaml file (content node) to JSON.
@@ -18,11 +22,10 @@ export const parse = (input: VFile): EntryNode => {
if (typeof doc === 'object') {
return {
- _filename: input.basename,
- _path: input.path,
- _slug: slugify(input.stem ?? ''),
- ...input.data,
- ...doc,
+ record: {
+ ...input.data,
+ ...doc,
+ },
};
}
throw new Error(
@@ -32,15 +35,22 @@ export const parse = (input: VFile): EntryNode => {
);
};
+function serialize(node: EntryNode, ctx: CollectionContext): VFile {
+ const doc = yaml.dump(node);
+ return new VFile(doc);
+}
+
/**
- * Converts markdown files to meaningful data.
+ * Converts yaml files to meaningful data.
*
- * @returns Markdown parser, preknown GraphQL schema fragments, and an EntryNode inspector function.
+ * @returns yaml parser, preknown GraphQL schema fragments, and an EntryNode inspector function.
*/
export const transformer: TransformerPlugin = () => {
return {
parse: (input: VFile): EntryNode => parse(input),
inspect: (input: EntryNode) => String(input),
+ id: ownPackage.name,
+ serialize,
extensions: ['.yaml', '.yml'],
};
};
diff --git a/packages/transformer-yaml/src/tests/index.test.ts b/packages/transformer-yaml/src/tests/index.test.ts
index ba28b068..5246dee3 100644
--- a/packages/transformer-yaml/src/tests/index.test.ts
+++ b/packages/transformer-yaml/src/tests/index.test.ts
@@ -26,6 +26,6 @@ const transformer = Transformer();
test('it can parse a basic yaml file', async (t) => {
const parse = transformer.parse as (input: VFile) => EntryNode;
- const node = parse(testFile);
- t.snapshot(node);
+ const { record } = parse(testFile);
+ t.snapshot(record);
});
diff --git a/packages/transformer-yaml/src/tests/snapshots/index.test.ts.md b/packages/transformer-yaml/src/tests/snapshots/index.test.ts.md
index 4174cf44..ee331630 100644
--- a/packages/transformer-yaml/src/tests/snapshots/index.test.ts.md
+++ b/packages/transformer-yaml/src/tests/snapshots/index.test.ts.md
@@ -9,9 +9,6 @@ Generated by [AVA](https://avajs.dev).
> Snapshot 1
{
- _filename: undefined,
- _path: undefined,
- _slug: '',
date_joined: Date 2021-02-25 16:41:59 558ms UTC {},
enjoys: [
'cats',
diff --git a/packages/transformer-yaml/src/tests/snapshots/index.test.ts.snap b/packages/transformer-yaml/src/tests/snapshots/index.test.ts.snap
index 8de1e400..b68787a8 100644
Binary files a/packages/transformer-yaml/src/tests/snapshots/index.test.ts.snap and b/packages/transformer-yaml/src/tests/snapshots/index.test.ts.snap differ
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index fc13aaed..8d439604 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -111,7 +111,7 @@ importers:
devDependencies:
'@flatbread/transformer-yaml': link:../../packages/transformer-yaml
'@sveltejs/adapter-static': 1.0.0-next.39
- '@sveltejs/kit': 1.0.0-next.405_svelte@3.49.0+vite@3.0.4
+ '@sveltejs/kit': 1.0.0-next.408_svelte@3.49.0+vite@3.0.4
'@typescript-eslint/eslint-plugin': 4.33.0_3ekaj7j3owlolnuhj3ykrb7u7i
'@typescript-eslint/parser': 4.33.0_hxadhbs2xogijvk7vq4t2azzbu
autoprefixer: 10.4.7_postcss@8.4.14
@@ -156,6 +156,7 @@ importers:
lodash-es: 4.17.21
lru-cache: 7.13.2
matcher: 5.0.0
+ nanoid: 4.0.0
plur: 5.1.0
tsup: 6.2.1
typescript: 4.7.4
@@ -167,6 +168,7 @@ importers:
lodash-es: 4.17.21
lru-cache: 7.13.2
matcher: 5.0.0
+ nanoid: 4.0.0
plur: 5.1.0
devDependencies:
'@types/lodash-es': 4.17.6
@@ -251,6 +253,7 @@ importers:
packages/source-filesystem:
specifiers:
'@flatbread/core': workspace:*
+ '@sindresorhus/slugify': ^2.1.0
'@types/lodash-es': 4.17.6
'@types/node': 16.11.47
lodash-es: ^4.17.21
@@ -265,6 +268,7 @@ importers:
unified: 10.1.2
devDependencies:
'@flatbread/core': link:../core
+ '@sindresorhus/slugify': 2.1.0
'@types/lodash-es': 4.17.6
'@types/node': 16.11.47
tsup: 6.2.1_typescript@4.7.4
@@ -274,7 +278,6 @@ importers:
packages/transformer-markdown:
specifiers:
'@flatbread/core': workspace:*
- '@sindresorhus/slugify': ^2.1.0
'@types/node': 16.11.47
'@types/sanitize-html': 2.6.2
graphql: 16.5.0
@@ -304,7 +307,6 @@ importers:
unified: ^10.1.2
vfile: 5.3.4
dependencies:
- '@sindresorhus/slugify': 2.1.0
graphql: 16.5.0
gray-matter: 4.0.3
lodash-es: 4.17.21
@@ -339,7 +341,6 @@ importers:
packages/transformer-yaml:
specifiers:
'@flatbread/core': workspace:*
- '@sindresorhus/slugify': ^2.1.0
'@types/js-yaml': 4.0.5
'@types/node': 16.11.47
js-yaml: ^4.1.0
@@ -347,7 +348,6 @@ importers:
typescript: 4.7.4
vfile: 5.3.4
dependencies:
- '@sindresorhus/slugify': 2.1.0
js-yaml: 4.1.0
devDependencies:
'@flatbread/core': link:../core
@@ -1464,6 +1464,10 @@ packages:
typescript: 4.7.4
dev: true
+ /@polka/url/1.0.0-next.21:
+ resolution: {integrity: sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==}
+ dev: true
+
/@protobufjs/aspromise/1.1.2:
resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==}
dev: false
@@ -1552,7 +1556,7 @@ packages:
dependencies:
'@sindresorhus/transliterate': 1.5.0
escape-string-regexp: 5.0.0
- dev: false
+ dev: true
/@sindresorhus/transliterate/1.5.0:
resolution: {integrity: sha512-/sfSkoNelLq5riqNRp5uBjHIKBi1MWZk9ubRT1WiBQuTfmDf7BeQkph2DJzRB83QagMPHk2VDjuvpy0VuwyzdA==}
@@ -1560,7 +1564,7 @@ packages:
dependencies:
escape-string-regexp: 5.0.0
lodash.deburr: 4.1.0
- dev: false
+ dev: true
/@sinonjs/commons/1.8.3:
resolution: {integrity: sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==}
@@ -1578,8 +1582,8 @@ packages:
resolution: {integrity: sha512-EeD39H6iEe0UEKnKxLFTZFZpi/FcX5xfbAvsMQ+B09aDZccpQmkJBSIo+4kq1JsQGSjwi/+J3aE9bR67R6CIyQ==}
dev: true
- /@sveltejs/kit/1.0.0-next.405_svelte@3.49.0+vite@3.0.4:
- resolution: {integrity: sha512-jHSa74F7k+hC+0fof75g/xm/+1M5sM66Qt6v8eLLMSgjkp36Lb5xOioBhbl6w0NYoE5xysLsBWuu+yHytfvCBA==}
+ /@sveltejs/kit/1.0.0-next.408_svelte@3.49.0+vite@3.0.4:
+ resolution: {integrity: sha512-AOSa0o7EJnFN56IptIMGvcbLNPlGdFKtzq0RYnYnzJAvtAocWrNdAL26gIFj2f6iYdBDRR6mm/a8X1yCQ9mCdA==}
engines: {node: '>=16.9'}
hasBin: true
requiresBuild: true
@@ -1589,9 +1593,18 @@ packages:
dependencies:
'@sveltejs/vite-plugin-svelte': 1.0.1_svelte@3.49.0+vite@3.0.4
chokidar: 3.5.3
+ cookie: 0.5.0
+ devalue: 2.0.1
+ kleur: 4.1.5
+ magic-string: 0.26.2
+ mime: 3.0.0
+ node-fetch: 3.2.10
sade: 1.8.1
+ set-cookie-parser: 2.5.1
+ sirv: 2.0.2
svelte: 3.49.0
tiny-glob: 0.2.9
+ undici: 5.8.2
vite: 3.0.4
transitivePeerDependencies:
- diff-match-patch
@@ -3166,7 +3179,6 @@ packages:
/cookie/0.5.0:
resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==}
engines: {node: '>= 0.6'}
- dev: false
/core-js-pure/3.23.5:
resolution: {integrity: sha512-8t78LdpKSuCq4pJYCYk8hl7XEkAX+BP16yRIwL3AanTksxuEf7CM83vRyctmiEL8NDZ3jpUcv56fk9/zG3aIuw==}
@@ -3247,6 +3259,11 @@ packages:
resolution: {integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==}
dev: true
+ /data-uri-to-buffer/4.0.0:
+ resolution: {integrity: sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA==}
+ engines: {node: '>= 12'}
+ dev: true
+
/data-urls/2.0.0:
resolution: {integrity: sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==}
engines: {node: '>=10'}
@@ -3413,6 +3430,10 @@ packages:
minimist: 1.2.6
dev: true
+ /devalue/2.0.1:
+ resolution: {integrity: sha512-I2TiqT5iWBEyB8GRfTDP0hiLZ0YeDJZ+upDxjBfOC2lebO5LezQMv7QvIUTzdb64jQyAKLf1AHADtGN+jw6v8Q==}
+ dev: true
+
/didyoumean/1.2.2:
resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==}
dev: true
@@ -4983,6 +5004,14 @@ packages:
bser: 2.1.1
dev: true
+ /fetch-blob/3.2.0:
+ resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
+ engines: {node: ^12.20 || >= 14.13}
+ dependencies:
+ node-domexception: 1.0.0
+ web-streams-polyfill: 3.2.1
+ dev: true
+
/figures/3.2.0:
resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==}
engines: {node: '>=8'}
@@ -5091,6 +5120,13 @@ packages:
engines: {node: '>=0.4.x'}
dev: false
+ /formdata-polyfill/4.0.10:
+ resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==}
+ engines: {node: '>=12.20.0'}
+ dependencies:
+ fetch-blob: 3.2.0
+ dev: true
+
/forwarded/0.2.0:
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
engines: {node: '>= 0.6'}
@@ -6594,7 +6630,7 @@ packages:
/lodash.deburr/4.1.0:
resolution: {integrity: sha512-m/M1U1f3ddMCs6Hq2tAsYThTBDaAKFDX3dwDo97GEYzamXi9SqUpjWi/Rrj/gf3X2n8ktwgZrlP1z6E3v/IExQ==}
- dev: false
+ dev: true
/lodash.merge/4.6.2:
resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
@@ -7184,6 +7220,12 @@ packages:
hasBin: true
dev: false
+ /mime/3.0.0:
+ resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==}
+ engines: {node: '>=10.0.0'}
+ hasBin: true
+ dev: true
+
/mimic-fn/2.1.0:
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
engines: {node: '>=6'}
@@ -7239,6 +7281,11 @@ packages:
resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==}
engines: {node: '>=4'}
+ /mrmime/1.0.1:
+ resolution: {integrity: sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==}
+ engines: {node: '>=10'}
+ dev: true
+
/ms/2.0.0:
resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==}
@@ -7276,6 +7323,12 @@ packages:
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
hasBin: true
+ /nanoid/4.0.0:
+ resolution: {integrity: sha512-IgBP8piMxe/gf73RTQx7hmnhwz0aaEXYakvqZyE302IXW3HyVNhdNGC+O2MwMAVhLEnvXlvKtGbtJf6wvHihCg==}
+ engines: {node: ^14 || ^16 || >=18}
+ hasBin: true
+ dev: false
+
/napi-build-utils/1.0.2:
resolution: {integrity: sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==}
@@ -7346,6 +7399,11 @@ packages:
/node-addon-api/5.0.0:
resolution: {integrity: sha512-CvkDw2OEnme7ybCykJpVcKH+uAOLV2qLqiyla128dN9TkEWfrYmxG6C2boDe5KcNQqZF3orkqzGgOMvZ/JNekA==}
+ /node-domexception/1.0.0:
+ resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
+ engines: {node: '>=10.5.0'}
+ dev: true
+
/node-emoji/1.11.0:
resolution: {integrity: sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==}
dependencies:
@@ -7364,6 +7422,15 @@ packages:
whatwg-url: 5.0.0
dev: false
+ /node-fetch/3.2.10:
+ resolution: {integrity: sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ dependencies:
+ data-uri-to-buffer: 4.0.0
+ fetch-blob: 3.2.0
+ formdata-polyfill: 4.0.10
+ dev: true
+
/node-gyp-build/4.5.0:
resolution: {integrity: sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg==}
hasBin: true
@@ -8553,6 +8620,10 @@ packages:
- supports-color
dev: false
+ /set-cookie-parser/2.5.1:
+ resolution: {integrity: sha512-1jeBGaKNGdEq4FgIrORu/N570dwoPYio8lSoYLWmX7sQ//0JY08Xh9o5pBcgmHQ/MbsYp/aZnOe1s1lIsbLprQ==}
+ dev: true
+
/setprototypeof/1.2.0:
resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
dev: false
@@ -8617,6 +8688,15 @@ packages:
dependencies:
is-arrayish: 0.3.2
+ /sirv/2.0.2:
+ resolution: {integrity: sha512-4Qog6aE29nIjAOKe/wowFTxOdmbEZKb+3tsLljaBRzJwtqto0BChD2zzH0LhgCSXiI+V7X+Y45v14wBZQ1TK3w==}
+ engines: {node: '>= 10'}
+ dependencies:
+ '@polka/url': 1.0.0-next.21
+ mrmime: 1.0.1
+ totalist: 3.0.0
+ dev: true
+
/sisteransi/1.0.5:
resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==}
dev: true
@@ -9235,6 +9315,11 @@ packages:
engines: {node: '>=0.6'}
dev: false
+ /totalist/3.0.0:
+ resolution: {integrity: sha512-eM+pCBxXO/njtF7vdFsHuqb+ElbxqtI4r5EAvk6grfAFyJ6IvWlSkfZ5T9ozC6xWw3Fj1fGoSmrl0gUs46JVIw==}
+ engines: {node: '>=6'}
+ dev: true
+
/tough-cookie/4.0.0:
resolution: {integrity: sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==}
engines: {node: '>=6'}
@@ -9495,6 +9580,11 @@ packages:
which-boxed-primitive: 1.0.2
dev: true
+ /undici/5.8.2:
+ resolution: {integrity: sha512-3KLq3pXMS0Y4IELV045fTxqz04Nk9Ms7yfBBHum3yxsTR4XNn+ZCaUbf/mWitgYDAhsplQ0B1G4S5D345lMO3A==}
+ engines: {node: '>=12.18'}
+ dev: true
+
/unified/10.1.2:
resolution: {integrity: sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==}
dependencies:
@@ -9752,6 +9842,11 @@ packages:
resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==}
dev: false
+ /web-streams-polyfill/3.2.1:
+ resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==}
+ engines: {node: '>= 8'}
+ dev: true
+
/webidl-conversions/3.0.1:
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
dev: false
diff --git a/tsconfig.json b/tsconfig.json
index 2de15ee4..ed99ee0c 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -24,5 +24,9 @@
]
}
},
+ //https://github.com/avajs/ava/discussions/3036#discussioncomment-2928239
+ "ts-node": {
+ "transpileOnly": true
+ },
"exclude": ["**/dist/**", "**/node_modules/**"]
}