From 772ca12a0562bce6d79fe0e93fb6a9e1f6d078d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 24 Sep 2025 00:05:00 +0200 Subject: [PATCH 01/29] feat: initial db pull implementation --- packages/cli/package.json | 2 + packages/cli/src/actions/action-utils.ts | 14 + packages/cli/src/actions/db.ts | 70 ++++- packages/cli/src/actions/pull/index.ts | 238 +++++++++++++++++ .../cli/src/actions/pull/provider/index.ts | 9 + .../src/actions/pull/provider/postgresql.ts | 242 ++++++++++++++++++ .../cli/src/actions/pull/provider/provider.ts | 44 ++++ .../cli/src/actions/pull/provider/sqlite.ts | 191 ++++++++++++++ packages/cli/src/actions/pull/utils.ts | 67 +++++ packages/language/src/document.ts | 5 +- pnpm-lock.yaml | 30 +++ 11 files changed, 906 insertions(+), 6 deletions(-) create mode 100644 packages/cli/src/actions/pull/index.ts create mode 100644 packages/cli/src/actions/pull/provider/index.ts create mode 100644 packages/cli/src/actions/pull/provider/postgresql.ts create mode 100644 packages/cli/src/actions/pull/provider/provider.ts create mode 100644 packages/cli/src/actions/pull/provider/sqlite.ts create mode 100644 packages/cli/src/actions/pull/utils.ts diff --git a/packages/cli/package.json b/packages/cli/package.json index 87a680c3..4826028f 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -46,6 +46,7 @@ }, "devDependencies": { "@types/better-sqlite3": "catalog:", + "@types/pg": "^8.11.11", "@types/semver": "^7.7.0", "@types/tmp": "catalog:", "@zenstackhq/eslint-config": "workspace:*", @@ -54,6 +55,7 @@ "@zenstackhq/typescript-config": "workspace:*", "@zenstackhq/vitest-config": "workspace:*", "better-sqlite3": "catalog:", + "pg": "^8.16.3", "tmp": "catalog:" } } diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index d3d0dacf..6ef29821 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -55,6 +55,20 @@ export async function loadSchemaDocument(schemaFile: string) { return loadResult.model; } +export async function loadSchemaDocumentWithServices(schemaFile: string) { + const loadResult = await loadDocument(schemaFile); + if (!loadResult.success) { + loadResult.errors.forEach((err) => { + console.error(colors.red(err)); + }); + throw new CliError('Schema contains errors. 
See above for details.'); + } + loadResult.warnings.forEach((warn) => { + console.warn(colors.yellow(warn)); + }); + return { services: loadResult.services, model: loadResult.model }; +} + export function handleSubProcessError(err: unknown) { if (err instanceof Error && 'status' in err && typeof err.status === 'number') { process.exit(err.status); diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 3d010837..6c39a352 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,25 +1,36 @@ +import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import { execPrisma } from '../utils/exec-utils'; -import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl } from './action-utils'; +import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; +import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; +import { providers } from './pull/provider'; +import { getDatasource, getDbName } from './pull/utils'; -type Options = { +type PushOptions = { schema?: string; acceptDataLoss?: boolean; forceReset?: boolean; }; +type PullOptions = { + schema?: string; +}; + /** * CLI action for db related commands */ -export async function run(command: string, options: Options) { +export async function run(command: string, options: PushOptions) { switch (command) { case 'push': await runPush(options); break; + case 'pull': + await runPull(options); + break; } } -async function runPush(options: Options) { +async function runPush(options: PushOptions) { const schemaFile = getSchemaFile(options.schema); // validate datasource url exists @@ -49,3 +60,54 @@ async function runPush(options: Options) { } } } + +async function runPull(options: PullOptions) { + const schemaFile = getSchemaFile(options.schema); + const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] + const datasource = getDatasource(model) + + if (!datasource) { + throw new Error('No datasource found in the schema.') + } + + if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { + throw new Error(`Unsupported datasource provider: ${datasource.provider}`) + } + + const provider = providers[datasource.provider]; + + if (!provider) { + throw new Error( + `No introspection provider found for: ${datasource.provider}` + ) + } + + const { enums, tables } = await provider.introspect(datasource.url) + + syncEnums(enums, model) + + const resolveRelations: Relation[] = [] + for (const table of tables) { + const relations = syncTable({ table, model, provider }) + resolveRelations.push(...relations) + } + + for (const rel of resolveRelations) { + syncRelation(model, rel, services); + } + + for (const d of model.declarations) { + if (d.$type !== 'DataModel') continue + const found = tables.find((t) => getDbName(d) === t.name) + if (!found) { + delete (d.$container as any)[d.$containerProperty!][d.$containerIndex!] 
+ } + } + + model.declarations = model.declarations.filter((d) => d !== undefined) + + const zmpdelSchema = await new ZModelCodeGenerator().generate(model) + fs.writeFileSync(schemaFile, zmpdelSchema) +} diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts new file mode 100644 index 00000000..4651225e --- /dev/null +++ b/packages/cli/src/actions/pull/index.ts @@ -0,0 +1,238 @@ +import type { ZModelServices } from '@zenstackhq/language' +import type { + Attribute, + AttributeArg, + DataField, + DataFieldAttribute, + DataFieldType, + DataModel, + Enum, + EnumField, + Model, + UnsupportedFieldType +} from '@zenstackhq/language/ast' +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' +import { getAttributeRef, getDbName } from './utils' + +export function syncEnums(dbEnums: IntrospectedEnum[], model: Model) { + for (const dbEnum of dbEnums) { + let schemaEnum = model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === dbEnum.enum_type + ) as Enum | undefined + + if (!schemaEnum) { + schemaEnum = { + $type: 'Enum' as const, + $container: model, + name: dbEnum.enum_type, + attributes: [], + comments: [], + fields: [], + } + model.declarations.push(schemaEnum) + } + schemaEnum.fields = dbEnum.values.map((v) => { + const existingValue = schemaEnum.fields.find((f) => getDbName(f) === v) + if (!existingValue) { + const enumField: EnumField = { + $type: 'EnumField' as const, + $container: schemaEnum, + name: v, + attributes: [], + comments: [], + } + return enumField + } + return existingValue + }) + } +} + +export type Relation = { + schema: string + table: string + column: string + type: 'one' | 'many' + fk_name: string + nullable: boolean + references: { + schema: string | null + table: string | null + column: string | null + } +} + +export function syncTable({ + model, + provider, + table, +}: { + table: IntrospectedTable + model: Model + provider: IntrospectionProvider +}) { + const relations: Relation[] = [] + let modelTable = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === table.name + ) as DataModel | undefined + + if (!modelTable) { + modelTable = { + $type: 'DataModel' as const, + $container: model, + name: table.name, + fields: [], + attributes: [], + comments: [], + isView: false, + mixins: [], + } + model.declarations.push(modelTable) + } + + modelTable.fields = table.columns.map((col) => { + if (col.foreign_key_table) { + relations.push({ + schema: table.schema, + table: table.name, + column: col.name, + type: col.unique ? 'one' : 'many', + fk_name: col.foreign_key_name!, + nullable: col.nullable, + references: { + schema: col.foreign_key_schema, + table: col.foreign_key_table, + column: col.foreign_key_column, + }, + }) + } + + const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' + const fieldName = `${fieldPrefix}${col.name}` + + const existingField = modelTable!.fields.find( + (f) => getDbName(f) === fieldName + ) + if (!existingField) { + const builtinType = provider.getBuiltinType(col.datatype) + const unsupported: UnsupportedFieldType = { + get $container() { + return type + }, + $type: 'UnsupportedFieldType' as const, + value: { + get $container() { + return unsupported + }, + $type: 'StringLiteral', + value: col.datatype, + }, + } + + const type: DataFieldType = { + get $container() { + return field + }, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? 
undefined : builtinType.type, + array: builtinType.isArray, + unsupported: + builtinType.type === 'Unsupported' ? unsupported : undefined, + optional: col.nullable, + reference: col.options.length + ? { + $refText: col.datatype, + ref: model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === col.datatype + ) as Enum | undefined, + } + : undefined, + } + + const field: DataField = { + $type: 'DataField' as const, + type, + $container: modelTable!, + name: fieldName, + get attributes() { + if (fieldPrefix !== '') return [] + + const attr: DataFieldAttribute = { + $type: 'DataFieldAttribute' as const, + get $container() { + return field + }, + decl: { + $refText: '@map', + ref: model.$document?.references.find( + (r) => + //@ts-ignore + r.ref.$type === 'Attribute' && r.ref.name === '@map' + )?.ref as Attribute, + }, + get args() { + const arg: AttributeArg = { + $type: 'AttributeArg' as const, + get $container() { + return attr + }, + name: 'name', + $resolvedParam: { + name: 'name', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: arg, + value: col.name, + } + }, + } + + return [arg] + }, + } + + return [attr] + }, + comments: [], + } + return field + } + return existingField + }) + + return relations +} + +export function syncRelation(model: Model, relation: Relation, services: ZModelServices) { + const idAttribute = getAttributeRef('@id', services) + const uniqueAttribute = getAttributeRef('@unique', services) + const relationAttribute = getAttributeRef('@relation', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute) { + throw new Error('Cannot find required attributes in the model.') + } + + const sourceModel = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === relation.table + ) as DataModel | undefined + if (!sourceModel) return + + const sourceField = sourceModel.fields.find( + (f) => getDbName(f) === relation.column + ) as DataField | undefined + if (!sourceField) return + + const targetModel = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table + ) as DataModel | undefined + if (!targetModel) return + + const targetField = targetModel.fields.find( + (f) => getDbName(f) === relation.references.column + ) + if (!targetField) return + + //TODO: Finish relation sync +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts new file mode 100644 index 00000000..82ee2ac3 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -0,0 +1,9 @@ +export * from './provider' + +import { postgresql } from "./postgresql"; +import { sqlite } from "./sqlite"; + +export const providers = { + postgresql, + sqlite +}; \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts new file mode 100644 index 00000000..10a9642a --- /dev/null +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -0,0 +1,242 @@ +import { Client } from 'pg' +import type { + IntrospectedEnum, + IntrospectedSchema, + IntrospectedTable, + IntrospectionProvider, +} from './provider' + +export const postgresql: IntrospectionProvider = { + getBuiltinType(type) { + const t = (type || '').toLowerCase() + + const isArray = t.startsWith('_') + + switch (t.replace(/^_/, '')) { + // integers + case 'int2': + case 'smallint': + case 'int4': + case 'integer': + return { type: 'Int', isArray } + case 'int8': 
+ case 'bigint': + return { type: 'BigInt', isArray } + + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray } + case 'float4': + case 'real': + case 'float8': + case 'double precision': + return { type: 'Float', isArray } + + // boolean + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray } + + // strings + case 'text': + case 'varchar': + case 'bpchar': + case 'character varying': + case 'character': + return { type: 'String', isArray } + + // uuid + case 'uuid': + return { type: 'String', isArray } + + // dates/times + case 'date': + case 'timestamp': + case 'timestamptz': + return { type: 'DateTime', isArray } + + // binary + case 'bytea': + return { type: 'Bytes', isArray } + + // json + case 'json': + case 'jsonb': + return { type: 'Json', isArray } + + // unsupported or postgres-specific + case 'time': + case 'timetz': + case 'interval': + case 'money': + case 'xml': + case 'bit': + case 'varbit': + case 'cidr': + case 'inet': + case 'macaddr': + case 'macaddr8': + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + case 'tsvector': + case 'tsquery': + case 'jsonpath': + case 'hstore': + case 'oid': + case 'name': + case 'regclass': + case 'regproc': + case 'regprocedure': + case 'regoper': + case 'regoperator': + case 'regtype': + case 'regconfig': + case 'regdictionary': + case 'pg_lsn': + case 'txid_snapshot': + case 'int4range': + case 'int8range': + case 'numrange': + case 'tsrange': + case 'tstzrange': + case 'daterange': + default: + return { type: 'Unsupported' as const, isArray } + } + }, + async introspect(connectionString: string): Promise { + const client = new Client({ connectionString }) + await client.connect() + + const { rows: tables } = await client.query( + tableIntrospectionQuery + ) + const { rows: enums } = await client.query( + enumIntrospectionQuery + ) + + return { + enums, + tables, + } + }, +} + +const enumIntrospectionQuery = ` +SELECT + n.nspname AS schema_name, + t.typname AS enum_type, + coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values +FROM pg_type t +JOIN pg_enum e ON t.oid = e.enumtypid +JOIN pg_namespace n ON n.oid = t.typnamespace +GROUP BY schema_name, enum_type +ORDER BY schema_name, enum_type;` + +const tableIntrospectionQuery = ` +SELECT +"ns"."nspname" AS "schema", +"cls"."relname" AS "name", +CASE "cls"."relkind" + WHEN 'r' THEN 'table' + WHEN 'v' THEN 'view' + ELSE NULL +END AS "type", +( +SELECT +coalesce(json_agg(agg), '[]') +FROM +( + SELECT + "att"."attname" AS "name", + "typ"."typname" AS "datatype", + "tns"."nspname" AS "datatype_schema", + "fk_ns"."nspname" AS "foreign_key_schema", + "fk_cls"."relname" AS "foreign_key_table", + "fk_att"."attname" AS "foreign_key_column", + "fk_con"."conname" AS "foreign_key_name", + CASE "fk_con"."confupdtype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_update", + CASE "fk_con"."confdeltype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_delete", + "pk_con"."conkey" IS NOT NULL AS "pk", + ( + EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY 
("u_con"."conkey") + ) + OR EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_index" AS "u_idx" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + ) + ) AS "unique", + "att"."attgenerated" != '' AS "computed", + "att"."attnotnull" != TRUE AS "nullable", + coalesce( + ( + SELECT + json_agg("enm"."enumlabel") AS "o" + FROM + "pg_catalog"."pg_enum" AS "enm" + WHERE + "enm"."enumtypid" = "typ"."oid" + ), + '[]' + ) AS "options" + FROM + "pg_catalog"."pg_attribute" AS "att" + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("pk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' + AND "fk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("fk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" + LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" + LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" + AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + WHERE + "att"."attrelid" = "cls"."oid" + AND "att"."attnum" >= 0 + AND "att"."attisdropped" != TRUE + ORDER BY "att"."attnum" +) AS agg +) AS "columns" +FROM +"pg_catalog"."pg_class" AS "cls" +INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" +WHERE +"ns"."nspname" !~ '^pg_' +AND "ns"."nspname" != 'information_schema' +AND "cls"."relkind" IN ('r', 'v') +AND "cls"."relname" !~ '^pg_' +AND "cls"."relname" !~ '_prisma_migrations' +` diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts new file mode 100644 index 00000000..d8bd0928 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -0,0 +1,44 @@ +import type { BuiltinType } from '@zenstackhq/language/ast' + +export type Cascade = "NO ACTION" | "RESTRICT"| "CASCADE" | "SET NULL" | "SET DEFAULT" | null; + +export interface IntrospectedTable { + schema: string + name: string + type: 'table' | 'view' + columns: { + name: string + datatype: string + datatype_schema: string + foreign_key_schema: string | null + foreign_key_table: string | null + foreign_key_column: string | null + foreign_key_name: string | null + foreign_key_on_update: Cascade + foreign_key_on_delete: Cascade + pk: boolean + computed: boolean + nullable: boolean + options: string[] + unique: boolean + }[] +} + +export type IntrospectedEnum = { + schema_name: string + enum_type: string + values: string[] +} + +export type IntrospectedSchema = { + tables: IntrospectedTable[] + enums: IntrospectedEnum[] +} + +export interface IntrospectionProvider { + introspect(connectionString: string): Promise + getBuiltinType(type: string): { + type: BuiltinType | 'Unsupported' + isArray: boolean + } +} diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts new file mode 100644 index 00000000..61883ef9 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -0,0 +1,191 @@ +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider' + +// Note: We dynamically import 
better-sqlite3 inside the async function to avoid +// requiring it at module load time for environments that don't use SQLite. + +export const sqlite: IntrospectionProvider = { + getBuiltinType(type) { + const t = (type || '').toLowerCase().trim() + + // SQLite has no array types + const isArray = false + + switch (t) { + // integers + case 'int': + case 'integer': + case 'tinyint': + case 'smallint': + case 'mediumint': + return { type: 'Int', isArray } + case 'bigint': + return { type: 'BigInt', isArray } + + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray } + case 'real': + case 'double': + case 'double precision': + case 'float': + return { type: 'Float', isArray } + + // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray } + + // strings + case 'text': + case 'varchar': + case 'character varying': + case 'char': + case 'character': + case 'clob': + case 'uuid': // often stored as TEXT + return { type: 'String', isArray } + + // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) + case 'date': + case 'datetime': + return { type: 'DateTime', isArray } + + // binary + case 'blob': + return { type: 'Bytes', isArray } + + // json (not a native type, but commonly used) + case 'json': + return { type: 'Json', isArray } + + default: { + // Fallbacks based on SQLite type affinity rules + if (t.includes('int')) return { type: 'Int', isArray } + if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray } + if (t.includes('blob')) return { type: 'Bytes', isArray } + if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray } + if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray } + return { type: 'Unsupported' as const, isArray } + } + } + }, + + async introspect(connectionString: string): Promise { + const SQLite = (await import('better-sqlite3')).default + const db = new SQLite(connectionString, { readonly: true }) + + try { + const all = (sql: string): T[] => { + const stmt: any = db.prepare(sql) + return stmt.all() as T[] + } + + // List user tables and views (exclude internal sqlite_*) + const tablesRaw = all<{ name: string; type: 'table' | 'view' }>( + "SELECT name, type FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" + ) + + const tables: IntrospectedTable[] = [] + + for (const t of tablesRaw) { + const tableName = t.name + const schema = 'main' + + // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) + const columnsInfo = all<{ + cid: number + name: string + type: string + notnull: number + dflt_value: string | null + pk: number + hidden?: number + }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`) + + // Unique columns detection via unique indexes with single column + const uniqueIndexRows = all<{ name: string; unique: number }>( + `PRAGMA index_list('${tableName.replace(/'/g, "''")}')` + ).filter((r) => r.unique === 1) + + const uniqueSingleColumn = new Set() + for (const idx of uniqueIndexRows) { + const idxCols = all<{ name: string }>( + `PRAGMA index_info('${idx.name.replace(/'/g, "''")}')` + ) + if (idxCols.length === 1 && idxCols[0]?.name) { + uniqueSingleColumn.add(idxCols[0].name) + } + } + + // Foreign keys mapping by column name + const fkRows = all<{ + id: number + seq: number + table: string + from: 
string + to: string | null + on_update: any + on_delete: any + }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`) + + const fkByColumn = new Map< + string, + { + foreign_key_schema: string | null + foreign_key_table: string | null + foreign_key_column: string | null + foreign_key_name: string | null + foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update'] + foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete'] + } + >() + + for (const fk of fkRows) { + fkByColumn.set(fk.from, { + foreign_key_schema: 'main', + foreign_key_table: fk.table || null, + foreign_key_column: fk.to || null, + foreign_key_name: null, + foreign_key_on_update: (fk.on_update as any) ?? null, + foreign_key_on_delete: (fk.on_delete as any) ?? null, + }) + } + + const columns: IntrospectedTable['columns'] = [] + for (const c of columnsInfo) { + // hidden: 1 (hidden/internal) -> skip; 2 (generated) -> mark computed + const hidden = c.hidden ?? 0 + if (hidden === 1) continue + + const fk = fkByColumn.get(c.name) + + columns.push({ + name: c.name, + datatype: c.type || '', + datatype_schema: schema, + foreign_key_schema: fk?.foreign_key_schema ?? null, + foreign_key_table: fk?.foreign_key_table ?? null, + foreign_key_column: fk?.foreign_key_column ?? null, + foreign_key_name: fk?.foreign_key_name ?? null, + foreign_key_on_update: fk?.foreign_key_on_update ?? null, + foreign_key_on_delete: fk?.foreign_key_on_delete ?? null, + pk: !!c.pk, + computed: hidden === 2, + nullable: c.notnull !== 1, + options: [], + unique: uniqueSingleColumn.has(c.name), + }) + } + + tables.push({ schema, name: tableName, columns, type: t.type }) + } + + const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums + + return { tables, enums } + } finally { + db.close() + } + }, +} diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts new file mode 100644 index 00000000..b611fbca --- /dev/null +++ b/packages/cli/src/actions/pull/utils.ts @@ -0,0 +1,67 @@ +import type { ZModelServices } from '@zenstackhq/language' +import { + DataField, + EnumField, + isInvocationExpr, + type AbstractDeclaration, + type Attribute, + type Model, +} from '@zenstackhq/language/ast' +import { getStringLiteral } from '@zenstackhq/language/utils' +import type { + DataSourceProviderType +} from '@zenstackhq/sdk/schema' +import type { Reference } from 'langium' + +export function getAttribute(model: Model, attrName: string) { + const references = model.$document! + .references as Reference[] + return references.find( + (a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName + )?.ref as Attribute | undefined +} + +export function getDatasource(model: Model) { + const datasource = model.declarations.find((d) => d.$type === 'DataSource') + if (!datasource) { + throw new Error('No datasource declaration found in the schema.') + } + + const urlField = datasource.fields.find((f) => f.name === 'url')! + let url = getStringLiteral(urlField.value) + + if (!url && isInvocationExpr(urlField.value)) { + url = process.env[getStringLiteral(urlField.value.args[0]) as string]! 
+ } + + if (!url) { + throw new Error('The url field must be a string literal or an env().') + } + + return { + name: datasource.name, + provider: getStringLiteral( + datasource.fields.find((f) => f.name === 'provider')?.value + ) as DataSourceProviderType, + url, + } +} + +export function getDbName( + decl: AbstractDeclaration | DataField | EnumField +): string { + if (!('attributes' in decl)) return decl.name + const nameAttr = decl.attributes.find( + (a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map' + ) + if (!nameAttr) return decl.name + const attrValue = nameAttr.args[0]?.value + + if (attrValue?.$type !== 'StringLiteral') return decl.name + + return attrValue.value +} + +export function getAttributeRef(name: string, services: ZModelServices) { + return services.shared.workspace.IndexManager.allElements("Attribute").find(a => a.name === name) as Attribute | undefined +} \ No newline at end of file diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 17146f85..60ea16b5 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -12,7 +12,7 @@ import path from 'node:path'; import { fileURLToPath } from 'node:url'; import { isDataSource, type Model } from './ast'; import { STD_LIB_MODULE_NAME } from './constants'; -import { createZModelServices } from './module'; +import { createZModelServices, type ZModelServices } from './module'; import { getDataModelAndTypeDefs, getDocument, hasAttribute, resolveImport, resolveTransitiveImports } from './utils'; import type { ZModelFormatter } from './zmodel-formatter'; @@ -24,7 +24,7 @@ export async function loadDocument( fileName: string, additionalModelFiles: string[] = [], ): Promise< - { success: true; model: Model; warnings: string[] } | { success: false; errors: string[]; warnings: string[] } + { success: true; model: Model; warnings: string[], services: ZModelServices } | { success: false; errors: string[]; warnings: string[] } > { const { ZModelLanguage: services } = createZModelServices(false); const extensions = services.LanguageMetaData.fileExtensions; @@ -134,6 +134,7 @@ export async function loadDocument( return { success: true, model: document.parseResult.value as Model, + services, warnings, }; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4ee7c5e8..66977660 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -199,6 +199,9 @@ importers: '@types/better-sqlite3': specifier: 'catalog:' version: 7.6.13 + '@types/pg': + specifier: ^8.11.11 + version: 8.11.11 '@types/semver': specifier: ^7.7.0 version: 7.7.0 @@ -223,6 +226,9 @@ importers: better-sqlite3: specifier: 'catalog:' version: 12.5.0 + pg: + specifier: ^8.16.3 + version: 8.16.3 tmp: specifier: 'catalog:' version: 0.2.3 @@ -5949,6 +5955,9 @@ packages: pg-connection-string@2.9.1: resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} + pg-connection-string@2.9.1: + resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} + pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -5982,6 +5991,15 @@ packages: pg-native: optional: true + pg@8.16.3: + resolution: {integrity: sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==} + engines: {node: '>= 16.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + 
peerDependenciesMeta: + pg-native: + optional: true + pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} @@ -13258,6 +13276,8 @@ snapshots: pg-connection-string@2.9.1: {} + pg-connection-string@2.9.1: {} + pg-int8@1.0.1: {} pg-numeric@1.0.2: {} @@ -13296,6 +13316,16 @@ snapshots: optionalDependencies: pg-cloudflare: 1.2.7 + pg@8.16.3: + dependencies: + pg-connection-string: 2.9.1 + pg-pool: 3.10.1(pg@8.16.3) + pg-protocol: 1.10.3 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.2.7 + pgpass@1.0.5: dependencies: split2: 4.2.0 From 16d6dc0334d0ef7e1a807e7f1769bb55d3de60e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:49:11 +0200 Subject: [PATCH 02/29] fix: generate imports and attributes for zmodel-code-generator --- packages/language/src/zmodel-code-generator.ts | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 55efb5fc..5277f26a 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -103,10 +103,18 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} }`; } + @gen(ModelImport) + private _generateModelImport(ast: ModelImport) { + return `import '${ast.path}'`; + } + @gen(Enum) private _generateEnum(ast: Enum) { return `enum ${ast.name} { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} +${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ast.attributes.length > 0 + ? '\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') + : '' + } }`; } From a997864cf2d9f200807f244a1c195e96009b1120 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:50:08 +0200 Subject: [PATCH 03/29] fix: add option to not exclude imports in loadDocument --- packages/language/src/document.ts | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 60ea16b5..b7ec0745 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -23,8 +23,10 @@ import type { ZModelFormatter } from './zmodel-formatter'; export async function loadDocument( fileName: string, additionalModelFiles: string[] = [], + keepImports: boolean = false, ): Promise< - { success: true; model: Model; warnings: string[], services: ZModelServices } | { success: false; errors: string[]; warnings: string[] } + | { success: true; model: Model; warnings: string[]; services: ZModelServices } + | { success: false; errors: string[]; warnings: string[] } > { const { ZModelLanguage: services } = createZModelServices(false); const extensions = services.LanguageMetaData.fileExtensions; @@ -112,14 +114,16 @@ export async function loadDocument( const model = document.parseResult.value as Model; - // merge all declarations into the main document - const imported = mergeImportsDeclarations(langiumDocuments, model); + if (keepImports === false) { + // merge all declarations into the main document + const imported = mergeImportsDeclarations(langiumDocuments, model); - // remove imported documents - imported.forEach((model) => { - langiumDocuments.deleteDocument(model.$document!.uri); - services.shared.workspace.IndexManager.remove(model.$document!.uri); - }); + // remove imported 
documents + imported.forEach((model) => { + langiumDocuments.deleteDocument(model.$document!.uri); + services.shared.workspace.IndexManager.remove(model.$document!.uri); + }); + } // extra validation after merging imported declarations const additionalErrors = validationAfterImportMerge(model); From f7a88be3f8084dcf4b17e38b0580f898eef615f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:53:50 +0200 Subject: [PATCH 04/29] fix: continue work on db pull --- packages/cli/package.json | 1 + packages/cli/src/actions/action-utils.ts | 2 +- packages/cli/src/actions/db.ts | 24 ++-- packages/cli/src/actions/pull/index.ts | 132 +++++++++++---------- packages/cli/src/actions/pull/utils.ts | 33 +++++- packages/cli/src/index.ts | 8 ++ pnpm-lock.yaml | 141 +++++++++++++++++++---- 7 files changed, 241 insertions(+), 100 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index 4826028f..1b5f9454 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -29,6 +29,7 @@ "pack": "pnpm pack" }, "dependencies": { + "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/sdk": "workspace:*", diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index 6ef29821..32e38fe2 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -56,7 +56,7 @@ export async function loadSchemaDocument(schemaFile: string) { } export async function loadSchemaDocumentWithServices(schemaFile: string) { - const loadResult = await loadDocument(schemaFile); + const loadResult = await loadDocument(schemaFile, [], true); if (!loadResult.success) { loadResult.errors.forEach((err) => { console.error(colors.red(err)); diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6c39a352..e79073e3 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,6 @@ import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; +import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; @@ -14,6 +15,7 @@ type PushOptions = { type PullOptions = { schema?: string; + out?: string; }; /** @@ -64,7 +66,7 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - + await import("@dotenvx/dotenvx/config") const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] const datasource = getDatasource(model) @@ -86,16 +88,16 @@ async function runPull(options: PullOptions) { const { enums, tables } = await provider.introspect(datasource.url) - syncEnums(enums, model) + syncEnums({ dbEnums: enums, model, services }) const resolveRelations: Relation[] = [] for (const table of tables) { - const relations = syncTable({ table, model, provider }) + const relations = syncTable({ table, model, provider, services }) resolveRelations.push(...relations) } - for (const rel of resolveRelations) { - syncRelation(model, rel, services); + for (const relation of resolveRelations) { + syncRelation({ model, relation, services }); } for (const d of 
model.declarations) { @@ -108,6 +110,14 @@ async function runPull(options: PullOptions) { model.declarations = model.declarations.filter((d) => d !== undefined) - const zmpdelSchema = await new ZModelCodeGenerator().generate(model) - fs.writeFileSync(schemaFile, zmpdelSchema) + const generator = await new ZModelCodeGenerator(); + + const zmodelSchema = await generator.generate(model) + + console.log(options.out ? `Writing to ${options.out}` : schemaFile); + + const outPath = options.out ? path.resolve(options.out) : schemaFile; + console.log(outPath); + + fs.writeFileSync(outPath, zmodelSchema) } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4651225e..6a7e2ba2 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,5 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language' import type { + ArrayExpr, Attribute, AttributeArg, DataField, @@ -9,18 +10,20 @@ import type { Enum, EnumField, Model, + ReferenceExpr, + StringLiteral, UnsupportedFieldType } from '@zenstackhq/language/ast' +import { getStringLiteral } from '@zenstackhq/language/utils' import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName } from './utils' +import { getAttributeRef, getDbName, getEnumRef, getModelRef } from './utils' -export function syncEnums(dbEnums: IntrospectedEnum[], model: Model) { +export function syncEnums({ dbEnums, model, services }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { for (const dbEnum of dbEnums) { - let schemaEnum = model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === dbEnum.enum_type - ) as Enum | undefined + let schemaEnum = getEnumRef(dbEnum.enum_type, services); if (!schemaEnum) { + console.log(`Adding enum for type ${dbEnum.enum_type}`); schemaEnum = { $type: 'Enum' as const, $container: model, @@ -66,17 +69,29 @@ export function syncTable({ model, provider, table, + services }: { table: IntrospectedTable model: Model provider: IntrospectionProvider + services: ZModelServices }) { + const idAttribute = getAttributeRef('@id', services) + const uniqueAttribute = getAttributeRef('@unique', services) + const relationAttribute = getAttributeRef('@relation', services) + const fieldMapAttribute = getAttributeRef('@map', services) + const tableMapAttribute = getAttributeRef('@@map', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { + throw new Error('Cannot find required attributes in the model.') + } + const relations: Relation[] = [] - let modelTable = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === table.name - ) as DataModel | undefined + let modelTable = getModelRef(table.name, services) if (!modelTable) { + console.log(`Adding model for table ${table.name}`); + modelTable = { $type: 'DataModel' as const, $container: model, @@ -96,7 +111,7 @@ export function syncTable({ schema: table.schema, table: table.name, column: col.name, - type: col.unique ? 
'one' : 'many', + type: 'one', fk_name: col.foreign_key_name!, nullable: col.nullable, references: { @@ -115,67 +130,54 @@ export function syncTable({ ) if (!existingField) { const builtinType = provider.getBuiltinType(col.datatype) - const unsupported: UnsupportedFieldType = { - get $container() { - return type - }, - $type: 'UnsupportedFieldType' as const, - value: { - get $container() { - return unsupported - }, - $type: 'StringLiteral', - value: col.datatype, - }, - } - - const type: DataFieldType = { - get $container() { - return field - }, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - unsupported: - builtinType.type === 'Unsupported' ? unsupported : undefined, - optional: col.nullable, - reference: col.options.length - ? { + const field: DataField = { + $type: 'DataField' as const, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, + array: builtinType.isArray, + get unsupported() { + return builtinType.type === 'Unsupported' ? { + $container: this, + $type: 'UnsupportedFieldType' as const, + get value() { + return { + $container: this, + $type: 'StringLiteral', + value: col.datatype, + } satisfies StringLiteral + }, + } satisfies UnsupportedFieldType : undefined + }, + optional: col.nullable, + reference: col.options.length + ? { $refText: col.datatype, ref: model.declarations.find( (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } - - const field: DataField = { - $type: 'DataField' as const, - type, + ) as Enum | undefined, + } + : undefined, + } satisfies DataFieldType + }, $container: modelTable!, name: fieldName, get attributes() { if (fieldPrefix !== '') return [] - const attr: DataFieldAttribute = { + return [{ $type: 'DataFieldAttribute' as const, - get $container() { - return field - }, + $container: this, decl: { $refText: '@map', - ref: model.$document?.references.find( - (r) => - //@ts-ignore - r.ref.$type === 'Attribute' && r.ref.name === '@map' - )?.ref as Attribute, + ref: fieldMapAttribute, }, get args() { - const arg: AttributeArg = { + return [{ $type: 'AttributeArg' as const, - get $container() { - return attr - }, + $container: this, name: 'name', $resolvedParam: { name: 'name', @@ -183,17 +185,13 @@ export function syncTable({ get value() { return { $type: 'StringLiteral' as const, - $container: arg, + $container: this, value: col.name, } }, - } - - return [arg] + }] satisfies AttributeArg[] }, - } - - return [attr] + }] satisfies DataFieldAttribute[] }, comments: [], } @@ -205,10 +203,16 @@ export function syncTable({ return relations } -export function syncRelation(model: Model, relation: Relation, services: ZModelServices) { +export function syncRelation({ model, relation, services }: { model: Model, relation: Relation, services: ZModelServices }) { const idAttribute = getAttributeRef('@id', services) const uniqueAttribute = getAttributeRef('@unique', services) const relationAttribute = getAttributeRef('@relation', services) + const fieldMapAttribute = getAttributeRef('@map', services) + const tableMapAttribute = getAttributeRef('@@map', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { + throw new Error('Cannot find required attributes in the model.') + } if (!idAttribute || !uniqueAttribute || !relationAttribute) { throw new 
Error('Cannot find required attributes in the model.') diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index b611fbca..defd0f30 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,11 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language' import { + AbstractDeclaration, DataField, + DataModel, + Enum, EnumField, isInvocationExpr, - type AbstractDeclaration, type Attribute, - type Model, + type Model } from '@zenstackhq/language/ast' import { getStringLiteral } from '@zenstackhq/language/utils' import type { @@ -28,10 +30,20 @@ export function getDatasource(model: Model) { } const urlField = datasource.fields.find((f) => f.name === 'url')! + let url = getStringLiteral(urlField.value) if (!url && isInvocationExpr(urlField.value)) { - url = process.env[getStringLiteral(urlField.value.args[0]) as string]! + const envName = getStringLiteral(urlField.value.args[0]?.value) + if (!envName) { + throw new Error('The url field must be a string literal or an env().') + } + if (!process.env[envName]) { + throw new Error( + `Environment variable ${envName} is not set, please set it to the database connection string.` + ) + } + url = process.env[envName] } if (!url) { @@ -62,6 +74,19 @@ export function getDbName( return attrValue.value } + +export function getDeclarationRef(type: T["$type"], name: string, services: ZModelServices) { + return services.shared.workspace.IndexManager.allElements(type).find((m) => m.node && getDbName(m.node as T) === name)?.node as T | undefined +} + +export function getEnumRef(name: string, services: ZModelServices) { + return getDeclarationRef('Enum', name, services); +} + +export function getModelRef(name: string, services: ZModelServices) { + return getDeclarationRef('DataModel', name, services); +} + export function getAttributeRef(name: string, services: ZModelServices) { - return services.shared.workspace.IndexManager.allElements("Attribute").find(a => a.name === name) as Attribute | undefined + return getDeclarationRef('Attribute', name, services); } \ No newline at end of file diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index b48fc321..7e2e5c19 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -137,6 +137,14 @@ function createProgram() { .addOption(new Option('--force-reset', 'force a reset of the database before push')) .action((options) => dbAction('push', options)); + dbCommand + .command('pull') + .description('Introspect your database.') + .addOption(schemaOption) + .addOption(noVersionCheckOption) + .addOption(new Option('--out ', 'add custom output path for the introspected schema')) + .action((options) => dbAction('pull', options)); + dbCommand .command('seed') .description('Seed the database') diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 66977660..1236121a 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -153,6 +153,9 @@ importers: packages/cli: dependencies: + '@dotenvx/dotenvx': + specifier: ^1.51.0 + version: 1.51.1 '@zenstackhq/common-helpers': specifier: workspace:* version: link:../common-helpers @@ -1179,6 +1182,10 @@ packages: resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} + '@dotenvx/dotenvx@1.51.1': + resolution: {integrity: sha512-fqcQxcxC4LOaUlW8IkyWw8x0yirlLUkbxohz9OnWvVWjf73J5yyw7jxWnkOJaUKXZotcGEScDox9MU6rSkcDgg==} + hasBin: true + '@dxup/nuxt@0.2.0': resolution: {integrity: 
sha512-tUS2040HEiGwjwZ8hTczfuRoiXSOuA+ATPXO9Bllf03nHHj1lSlmaAyVJHFsSXL5Os5NZqimNAZ1iDed7VElzA==} @@ -1193,6 +1200,12 @@ packages: resolution: {integrity: sha512-NKBGBSIKUG584qrS1tyxVpX/AKJKQw5HgjYEnPLC0QsTw79JrGn+qUr8CXFb955Iy7GUdiiUv1rJ6JBGvaKb6w==} engines: {node: '>=18'} + '@ecies/ciphers@0.2.5': + resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + peerDependencies: + '@noble/ciphers': ^1.0.0 + '@emnapi/core@1.6.0': resolution: {integrity: sha512-zq/ay+9fNIJJtJiZxdTnXS20PllcYMX3OE23ESc4HK/bdYu3cOWYVhsOhVnXALfU/uqJIxn5NBPd9z4v+SfoSg==} @@ -1847,14 +1860,26 @@ packages: cpu: [x64] os: [win32] + '@noble/ciphers@1.3.0': + resolution: {integrity: sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw==} + engines: {node: ^14.21.3 || >=16} + '@noble/ciphers@2.0.1': resolution: {integrity: sha512-xHK3XHPUW8DTAobU+G0XT+/w+JLM7/8k1UFdB5xg/zTFPnFCobhftzw8wl4Lw2aq/Rvir5pxfZV5fEazmeCJ2g==} engines: {node: '>= 20.19.0'} + '@noble/curves@1.9.7': + resolution: {integrity: sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==} + engines: {node: ^14.21.3 || >=16} + '@noble/hashes@1.7.1': resolution: {integrity: sha512-B8XBPsn4vT/KJAGqDzbwztd+6Yte3P4V7iafm24bxgDe/mlRuK6xmWPuCNrKt2vDafZ8MfJLlchDG/vYafQEjQ==} engines: {node: ^14.21.3 || >=16} + '@noble/hashes@1.8.0': + resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} + engines: {node: ^14.21.3 || >=16} + '@noble/hashes@2.0.1': resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} engines: {node: '>= 20.19.0'} @@ -4237,6 +4262,10 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + eciesjs@0.4.16: + resolution: {integrity: sha512-dS5cbA9rA2VR4Ybuvhg6jvdmp46ubLn3E+px8cG/35aEDNclrqoCjg6mt0HYZ/M+OoESS3jSkCrqk1kWAEhWAw==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -4507,6 +4536,10 @@ packages: '@sinclair/typebox': optional: true + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + execa@8.0.1: resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} engines: {node: '>=16.17'} @@ -4718,6 +4751,10 @@ packages: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + get-stream@8.0.1: resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} engines: {node: '>=16'} @@ -4871,6 +4908,10 @@ packages: httpxy@0.1.7: resolution: {integrity: sha512-pXNx8gnANKAndgga5ahefxc++tJvNL87CXoRwxn1cJE2ZkWEojF3tNfQIEhZX/vfpt+wzeAzpUI4qkediX1MLQ==} + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: 
{node: '>=10.17.0'} + human-signals@5.0.0: resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} engines: {node: '>=16.17.0'} @@ -5729,6 +5770,10 @@ packages: resolution: {integrity: sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==} engines: {node: '>=0.10.0'} + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + npm-run-path@5.3.0: resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -5770,6 +5815,10 @@ packages: resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} engines: {node: '>= 0.4'} + object-treeify@1.1.33: + resolution: {integrity: sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A==} + engines: {node: '>= 10'} + object.assign@4.1.7: resolution: {integrity: sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==} engines: {node: '>= 0.4'} @@ -5955,9 +6004,6 @@ packages: pg-connection-string@2.9.1: resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} - pg-connection-string@2.9.1: - resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} - pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -5991,15 +6037,6 @@ packages: pg-native: optional: true - pg@8.16.3: - resolution: {integrity: sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==} - engines: {node: '>= 16.0.0'} - peerDependencies: - pg-native: '>=3.0.1' - peerDependenciesMeta: - pg-native: - optional: true - pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} @@ -6821,6 +6858,10 @@ packages: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + strip-final-newline@3.0.0: resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} engines: {node: '>=12'} @@ -7620,6 +7661,11 @@ packages: engines: {node: '>= 8'} hasBin: true + which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + which@5.0.0: resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==} engines: {node: ^18.17.0 || >=20.5.0} @@ -8004,6 +8050,18 @@ snapshots: '@csstools/css-tokenizer@3.0.4': optional: true + '@dotenvx/dotenvx@1.51.1': + dependencies: + commander: 11.1.0 + dotenv: 17.2.3 + eciesjs: 0.4.16 + execa: 5.1.1 + fdir: 6.5.0(picomatch@4.0.3) + ignore: 5.3.2 + object-treeify: 1.1.33 + picomatch: 4.0.3 + which: 4.0.0 + '@dxup/nuxt@0.2.0(magicast@0.5.0)': dependencies: 
'@dxup/unimport': 0.1.0 @@ -8022,6 +8080,10 @@ snapshots: dependencies: '@edge-runtime/primitives': 6.0.0 + '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': + dependencies: + '@noble/ciphers': 1.3.0 + '@emnapi/core@1.6.0': dependencies: '@emnapi/wasi-threads': 1.1.0 @@ -8502,10 +8564,18 @@ snapshots: '@next/swc-win32-x64-msvc@16.0.10': optional: true + '@noble/ciphers@1.3.0': {} + '@noble/ciphers@2.0.1': {} + '@noble/curves@1.9.7': + dependencies: + '@noble/hashes': 1.8.0 + '@noble/hashes@1.7.1': {} + '@noble/hashes@1.8.0': {} + '@noble/hashes@2.0.1': {} '@nodelib/fs.scandir@2.1.5': @@ -11087,6 +11157,13 @@ snapshots: eastasianwidth@0.2.0: {} + eciesjs@0.4.16: + dependencies: + '@ecies/ciphers': 0.2.5(@noble/ciphers@1.3.0) + '@noble/ciphers': 1.3.0 + '@noble/curves': 1.9.7 + '@noble/hashes': 1.8.0 + ee-first@1.1.1: {} effect@3.18.4: @@ -11534,6 +11611,18 @@ snapshots: optionalDependencies: '@sinclair/typebox': 0.34.41 + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + execa@8.0.1: dependencies: cross-spawn: 7.0.6 @@ -11821,6 +11910,8 @@ snapshots: dunder-proto: 1.0.1 es-object-atoms: 1.1.1 + get-stream@6.0.1: {} + get-stream@8.0.1: {} get-stream@9.0.1: @@ -12005,6 +12096,8 @@ snapshots: httpxy@0.1.7: {} + human-signals@2.1.0: {} + human-signals@5.0.0: {} human-signals@8.0.1: {} @@ -12885,6 +12978,10 @@ snapshots: normalize-range@0.1.2: {} + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + npm-run-path@5.3.0: dependencies: path-key: 4.0.0 @@ -13032,6 +13129,8 @@ snapshots: object-keys@1.1.1: {} + object-treeify@1.1.33: {} + object.assign@4.1.7: dependencies: call-bind: 1.0.8 @@ -13276,8 +13375,6 @@ snapshots: pg-connection-string@2.9.1: {} - pg-connection-string@2.9.1: {} - pg-int8@1.0.1: {} pg-numeric@1.0.2: {} @@ -13316,16 +13413,6 @@ snapshots: optionalDependencies: pg-cloudflare: 1.2.7 - pg@8.16.3: - dependencies: - pg-connection-string: 2.9.1 - pg-pool: 3.10.1(pg@8.16.3) - pg-protocol: 1.10.3 - pg-types: 2.2.0 - pgpass: 1.0.5 - optionalDependencies: - pg-cloudflare: 1.2.7 - pgpass@1.0.5: dependencies: split2: 4.2.0 @@ -14238,6 +14325,8 @@ snapshots: strip-bom@3.0.0: {} + strip-final-newline@2.0.0: {} + strip-final-newline@3.0.0: {} strip-final-newline@4.0.0: {} @@ -15114,6 +15203,10 @@ snapshots: dependencies: isexe: 2.0.0 + which@4.0.0: + dependencies: + isexe: 3.1.1 + which@5.0.0: dependencies: isexe: 3.1.1 From ae21f69bed2ee61f16217125fa49c2c1e29ec068 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:59:00 +0200 Subject: [PATCH 05/29] fix: missing import --- packages/language/src/zmodel-code-generator.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 5277f26a..21993878 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -28,6 +28,7 @@ import { LiteralExpr, MemberAccessExpr, Model, + ModelImport, NullExpr, NumberLiteral, ObjectExpr, From 8fb1e370b4c3f58d5e7f326c18520be8011f3b77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 26 Sep 2025 02:57:44 +0200 Subject: [PATCH 06/29] fix: rewrite model generation generate model from ground up and diff later --- packages/cli/src/actions/db.ts | 39 +- packages/cli/src/actions/pull/index.ts | 423 ++++++++++++++---- 
.../src/actions/pull/provider/postgresql.ts | 295 ++++++++---- .../cli/src/actions/pull/provider/provider.ts | 9 +- .../cli/src/actions/pull/provider/sqlite.ts | 11 +- 5 files changed, 570 insertions(+), 207 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e79073e3..61e05956 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,3 +1,4 @@ +import type { Model } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -5,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName } from './pull/utils'; +import { getDatasource } from './pull/utils'; type PushOptions = { schema?: string; @@ -88,31 +89,35 @@ async function runPull(options: PullOptions) { const { enums, tables } = await provider.introspect(datasource.url) - syncEnums({ dbEnums: enums, model, services }) + const newModel: Model = { + $type: 'Model', + $container: undefined, + $containerProperty: undefined, + $containerIndex: undefined, + declarations: [...model.declarations.filter(d => ["DataSource"].includes(d.$type))], + imports: [], + }; + + + syncEnums({ dbEnums: enums, model: newModel, services }) - const resolveRelations: Relation[] = [] - for (const table of tables) { - const relations = syncTable({ table, model, provider, services }) - resolveRelations.push(...relations) - } - for (const relation of resolveRelations) { - syncRelation({ model, relation, services }); + + const resolvedRelations: Relation[] = [] + for (const table of tables) { + const relations = syncTable({ table, model: newModel, provider, services }) + resolvedRelations.push(...relations) } - for (const d of model.declarations) { - if (d.$type !== 'DataModel') continue - const found = tables.find((t) => getDbName(d) === t.name) - if (!found) { - delete (d.$container as any)[d.$containerProperty!][d.$containerIndex!] - } + for (const relation of resolvedRelations) { + syncRelation({ model: newModel, relation, services }); } - model.declarations = model.declarations.filter((d) => d !== undefined) + //TODO: diff models and apply changes only const generator = await new ZModelCodeGenerator(); - const zmodelSchema = await generator.generate(model) + const zmodelSchema = await generator.generate(newModel) console.log(options.out ? 
`Writing to ${options.out}` : schemaFile); diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 6a7e2ba2..75225c95 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,7 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language' import type { ArrayExpr, - Attribute, AttributeArg, DataField, DataFieldAttribute, @@ -14,40 +13,28 @@ import type { StringLiteral, UnsupportedFieldType } from '@zenstackhq/language/ast' -import { getStringLiteral } from '@zenstackhq/language/utils' import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName, getEnumRef, getModelRef } from './utils' +import { getAttributeRef, getDbName } from './utils' -export function syncEnums({ dbEnums, model, services }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { +export function syncEnums({ dbEnums, model }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { for (const dbEnum of dbEnums) { - let schemaEnum = getEnumRef(dbEnum.enum_type, services); - - if (!schemaEnum) { - console.log(`Adding enum for type ${dbEnum.enum_type}`); - schemaEnum = { - $type: 'Enum' as const, - $container: model, - name: dbEnum.enum_type, - attributes: [], - comments: [], - fields: [], - } - model.declarations.push(schemaEnum) - } - schemaEnum.fields = dbEnum.values.map((v) => { - const existingValue = schemaEnum.fields.find((f) => getDbName(f) === v) - if (!existingValue) { - const enumField: EnumField = { + const schemaEnum = { + $type: 'Enum' as const, + $container: model, + name: dbEnum.enum_type, + attributes: [], + comments: [], + get fields() { + return dbEnum.values.map((v): EnumField => ({ $type: 'EnumField' as const, $container: schemaEnum, name: v, attributes: [], comments: [], - } - return enumField + })); } - return existingValue - }) + } + model.declarations.push(schemaEnum) } } @@ -62,6 +49,7 @@ export type Relation = { schema: string | null table: string | null column: string | null + type: 'one' | 'many' } } @@ -78,6 +66,7 @@ export function syncTable({ }) { const idAttribute = getAttributeRef('@id', services) const uniqueAttribute = getAttributeRef('@unique', services) + const modelUniqueAttribute = getAttributeRef('@@unique', services) const relationAttribute = getAttributeRef('@relation', services) const fieldMapAttribute = getAttributeRef('@map', services) const tableMapAttribute = getAttributeRef('@@map', services) @@ -87,25 +76,21 @@ export function syncTable({ } const relations: Relation[] = [] - let modelTable = getModelRef(table.name, services) - - if (!modelTable) { - console.log(`Adding model for table ${table.name}`); - - modelTable = { - $type: 'DataModel' as const, - $container: model, - name: table.name, - fields: [], - attributes: [], - comments: [], - isView: false, - mixins: [], - } - model.declarations.push(modelTable) + const modelTable: DataModel = { + $type: 'DataModel' as const, + $container: model, + name: table.name, + fields: [], + attributes: [], + comments: [], + isView: false, + mixins: [], } + model.declarations.push(modelTable) modelTable.fields = table.columns.map((col) => { + if (col.default) console.log(`${table.name}.${col.name} -> ${col.default}`); + if (col.foreign_key_table) { relations.push({ schema: table.schema, @@ -118,6 +103,7 @@ export function syncTable({ schema: col.foreign_key_schema, table: col.foreign_key_table, column: col.foreign_key_column, + type: 
col.unique ? 'one' : 'many', }, }) } @@ -125,49 +111,100 @@ export function syncTable({ const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' const fieldName = `${fieldPrefix}${col.name}` - const existingField = modelTable!.fields.find( - (f) => getDbName(f) === fieldName - ) - if (!existingField) { - const builtinType = provider.getBuiltinType(col.datatype) - const field: DataField = { - $type: 'DataField' as const, - get type() { - return { + const builtinType = provider.getBuiltinType(col.datatype) + const field: DataField = { + $type: 'DataField' as const, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, + array: builtinType.isArray, + get unsupported() { + return builtinType.type === 'Unsupported' ? { + $container: this, + $type: 'UnsupportedFieldType' as const, + get value() { + return { + $container: this, + $type: 'StringLiteral', + value: col.datatype, + } satisfies StringLiteral + }, + } satisfies UnsupportedFieldType : undefined + }, + optional: col.nullable, + reference: col.options.length + ? { + $refText: col.datatype, + ref: model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === col.datatype + ) as Enum | undefined, + } + : undefined, + } satisfies DataFieldType + }, + $container: modelTable!, + name: fieldName, + get attributes() { + if (fieldPrefix !== '') return [] + + const getDefaultAttrs = () => { + if (!col.default) return []; + + const defaultValue = col.default && provider.getDefaultValue({ + fieldName: col.name, + defaultValue: col.default, + container: this, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }) + + if (!defaultValue) return []; + + if (Array.isArray(defaultValue)) { + return defaultValue; + } + + if (defaultValue?.$type === 'DataFieldAttribute') { + return [defaultValue]; + } + + return [{ + $type: 'DataFieldAttribute' as const, $container: this, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - get unsupported() { - return builtinType.type === 'Unsupported' ? { + decl: { + $refText: 'default', + ref: getAttributeRef('@default', services) + }, + get args() { + return [{ + $type: 'AttributeArg' as const, $container: this, - $type: 'UnsupportedFieldType' as const, + name: '', + $resolvedParam: { + name: '', + }, get value() { - return { - $container: this, - $type: 'StringLiteral', - value: col.datatype, - } satisfies StringLiteral + return { ...defaultValue, $container: this } }, - } satisfies UnsupportedFieldType : undefined + }] satisfies AttributeArg[] }, - optional: col.nullable, - reference: col.options.length - ? { - $refText: col.datatype, - ref: model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } satisfies DataFieldType - }, - $container: modelTable!, - name: fieldName, - get attributes() { - if (fieldPrefix !== '') return [] + } satisfies DataFieldAttribute]; + } - return [{ + return [ + ...(col.pk ? 
[{ + $type: 'DataFieldAttribute' as const, + $container: this, + args: [], + decl: { + $refText: '@id', + ref: idAttribute, + }, + }] : []) satisfies DataFieldAttribute[], + ...getDefaultAttrs(), + { $type: 'DataFieldAttribute' as const, $container: this, decl: { @@ -178,9 +215,9 @@ export function syncTable({ return [{ $type: 'AttributeArg' as const, $container: this, - name: 'name', + name: '', $resolvedParam: { - name: 'name', + name: '', }, get value() { return { @@ -189,17 +226,58 @@ export function syncTable({ value: col.name, } }, - }] satisfies AttributeArg[] + } + ] satisfies AttributeArg[] }, - }] satisfies DataFieldAttribute[] - }, - comments: [], - } - return field + } + ] satisfies DataFieldAttribute[] + }, + comments: [], } - return existingField + return field }) + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name) + if (uniqieColumns.length > 0) { + modelTable.attributes.push({ + $type: 'DataModelAttribute' as const, + $container: modelTable, + decl: { + $refText: '@unique', + ref: modelUniqueAttribute, + }, + get args() { + return uniqieColumns.map((c) => ({ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + $refText: c, + ref: modelTable.fields.find((f) => f.name === c), + }, + args: [], + }] satisfies ReferenceExpr[] + } + } as ArrayExpr + }, + })) satisfies AttributeArg[] + }, + }) + + return relations + } + return relations } @@ -214,10 +292,6 @@ export function syncRelation({ model, relation, services }: { model: Model, rela throw new Error('Cannot find required attributes in the model.') } - if (!idAttribute || !uniqueAttribute || !relationAttribute) { - throw new Error('Cannot find required attributes in the model.') - } - const sourceModel = model.declarations.find( (d) => d.$type === 'DataModel' && getDbName(d) === relation.table ) as DataModel | undefined @@ -239,4 +313,169 @@ export function syncRelation({ model, relation, services }: { model: Model, rela if (!targetField) return //TODO: Finish relation sync + + const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
'_' : '' + + sourceModel.fields.push({ + $type: 'DataField' as const, + $container: sourceModel, + name: `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, + comments: [], + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + reference: { + ref: targetModel, + $refText: targetModel.name, + }, + optional: relation.nullable, + //TODO + array: relation.type === 'many', + } satisfies DataFieldType + }, + get attributes() { + return [{ + $type: 'DataFieldAttribute' as const, + $container: this, + decl: { + $refText: '@relation', + ref: relationAttribute, + }, + get args() { + return [{ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }, + { + $type: 'AttributeArg' as const, + $container: this, + name: 'fields', + $resolvedParam: { + name: 'fields', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + ref: sourceField, + $refText: sourceField.name, + }, + args: [], + }] satisfies ReferenceExpr[] + }, + } satisfies ArrayExpr + }, + }, { + $type: 'AttributeArg' as const, + $container: this, + name: 'references', + $resolvedParam: { + name: 'references', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + ref: targetField, + $refText: targetField.name, + }, + args: [], + }] satisfies ReferenceExpr[] + }, + } satisfies ArrayExpr + }, + }, { + $type: 'AttributeArg' as const, + $container: this, + name: 'map', + $resolvedParam: { + name: 'map', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }] satisfies AttributeArg[] + }, + }] satisfies DataFieldAttribute[] + }, + }) + + const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : '' + const oppositeFieldName = relation.type === 'one' + ? 
`${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` + : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + + targetModel.fields.push({ + $type: 'DataField' as const, + $container: targetModel, + name: oppositeFieldName, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + reference: { + ref: sourceModel, + $refText: sourceModel.name, + }, + optional: relation.references.type === 'one' && relation.nullable, + array: relation.references.type === 'many', + } satisfies DataFieldType + }, + get attributes() { + return [ + { + $type: 'DataFieldAttribute' as const, + $container: this, + decl: { + $refText: '@relation', + ref: relationAttribute, + }, + get args() { + return [{ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }] satisfies AttributeArg[] + } + } + ] satisfies DataFieldAttribute[] + }, + comments: [], + }) } \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 10a9642a..be882be6 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,4 +1,6 @@ +import { AttributeArg, DataFieldAttribute, Expression, FunctionDecl, InvocationExpr } from '@zenstackhq/language/ast' import { Client } from 'pg' +import { getAttributeRef, getDbName } from '../utils' import type { IntrospectedEnum, IntrospectedSchema, @@ -126,6 +128,114 @@ export const postgresql: IntrospectionProvider = { tables, } }, + getDefaultValue({ defaultValue, container: $container, fieldName, services, enums }) { + // Handle common cases + console.log(defaultValue); + + const val = defaultValue.trim() + + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + const attrs: DataFieldAttribute[] = []; + + attrs.push({ + $type: "DataFieldAttribute" as const, + $container: $container as any, + decl: { + $refText: '@default', + ref: getAttributeRef('@default', services) + }, + get args(): AttributeArg[] { + return [{ + $type: 'AttributeArg' as const, + $container: this as any, + get value(): Expression { + return { + $type: 'InvocationExpr' as const, + $container: this, + function: { + $refText: 'now', + ref: services.shared.workspace.IndexManager.allElements(FunctionDecl).find((f) => (f.node as FunctionDecl)?.name === 'now')?.node as FunctionDecl + }, + args: [], + } satisfies InvocationExpr + } + }] + } + }); + + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + // for updatedAt, use @updatedAt attribute + attrs.push({ + $type: "DataFieldAttribute" as const, + $container: $container as any, + decl: { + $refText: 'updatedAt', + ref: getAttributeRef('@updatedAt', services) + }, + args: [], + }); + } + + return attrs.length === 1 ? 
attrs[0] : attrs; + } + + if (val.includes('::')) { + const [enumValue, enumName] = val.replace(/'|"/g, '').split('::').map((s) => s.trim()) as [string, string] + const enumDef = enums.find((e) => getDbName(e) === enumName) + if (!enumDef) { + throw new Error(`Enum type ${enumName} not found for default value ${defaultValue}`) + } + const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue) + if (!enumField) { + throw new Error(`Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`) + } + + return { + $type: 'ReferenceExpr' as const, + $container: $container as any, + target: { + $refText: enumField!.name, + ref: enumField, + }, + args: [], + } + } + + if (val === 'true' || val === 'false') { + return { + $type: 'BooleanLiteral' as const, + $container: $container as any, + value: val === 'true', + } + } + + if (/^\d+$/.test(val)) { + return { + $container: $container as any, + $type: 'NumberLiteral' as const, + value: val, + } + } + + if (/^-?\d+(\.\d+)?$/.test(val)) { + // float + return { + $container: $container as any, + $type: 'NumberLiteral' as const, + value: val, + } + } + + if (val.startsWith("'") && val.endsWith("'")) { + // string + return { + $container: $container as any, + $type: 'StringLiteral' as const, + value: val.slice(1, -1).replace(/''/g, "'"), + } + } + return undefined + }, } const enumIntrospectionQuery = ` @@ -141,102 +251,101 @@ ORDER BY schema_name, enum_type;` const tableIntrospectionQuery = ` SELECT -"ns"."nspname" AS "schema", -"cls"."relname" AS "name", -CASE "cls"."relkind" - WHEN 'r' THEN 'table' - WHEN 'v' THEN 'view' - ELSE NULL -END AS "type", -( -SELECT -coalesce(json_agg(agg), '[]') -FROM -( - SELECT - "att"."attname" AS "name", - "typ"."typname" AS "datatype", - "tns"."nspname" AS "datatype_schema", - "fk_ns"."nspname" AS "foreign_key_schema", - "fk_cls"."relname" AS "foreign_key_table", - "fk_att"."attname" AS "foreign_key_column", - "fk_con"."conname" AS "foreign_key_name", - CASE "fk_con"."confupdtype" - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'c' THEN 'CASCADE' - WHEN 'n' THEN 'SET NULL' - WHEN 'd' THEN 'SET DEFAULT' + "ns"."nspname" AS "schema", + "cls"."relname" AS "name", + CASE "cls"."relkind" + WHEN 'r' THEN 'table' + WHEN 'v' THEN 'view' ELSE NULL - END AS "foreign_key_on_update", - CASE "fk_con"."confdeltype" - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'c' THEN 'CASCADE' - WHEN 'n' THEN 'SET NULL' - WHEN 'd' THEN 'SET DEFAULT' + END AS "type", + CASE + WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true) ELSE NULL - END AS "foreign_key_on_delete", - "pk_con"."conkey" IS NOT NULL AS "pk", - ( - EXISTS ( - SELECT 1 - FROM "pg_catalog"."pg_constraint" AS "u_con" - WHERE "u_con"."contype" = 'u' - AND "u_con"."conrelid" = "cls"."oid" - AND array_length("u_con"."conkey", 1) = 1 - AND "att"."attnum" = ANY ("u_con"."conkey") - ) - OR EXISTS ( - SELECT 1 - FROM "pg_catalog"."pg_index" AS "u_idx" - WHERE "u_idx"."indrelid" = "cls"."oid" - AND "u_idx"."indisunique" = TRUE - AND "u_idx"."indnkeyatts" = 1 - AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) - ) - ) AS "unique", - "att"."attgenerated" != '' AS "computed", - "att"."attnotnull" != TRUE AS "nullable", - coalesce( + END AS "definition", ( - SELECT - json_agg("enm"."enumlabel") AS "o" - FROM - "pg_catalog"."pg_enum" AS "enm" - WHERE - "enm"."enumtypid" = "typ"."oid" - ), - '[]' - ) AS "options" - FROM - "pg_catalog"."pg_attribute" AS "att" - INNER JOIN "pg_catalog"."pg_type" AS "typ" ON 
"typ"."oid" = "att"."atttypid" - INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" - LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' - AND "pk_con"."conrelid" = "cls"."oid" - AND "att"."attnum" = ANY ("pk_con"."conkey") - LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' - AND "fk_con"."conrelid" = "cls"."oid" - AND "att"."attnum" = ANY ("fk_con"."conkey") - LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" - LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" - LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" - AND "fk_att"."attnum" = ANY ("fk_con"."confkey") - WHERE - "att"."attrelid" = "cls"."oid" - AND "att"."attnum" >= 0 - AND "att"."attisdropped" != TRUE - ORDER BY "att"."attnum" -) AS agg -) AS "columns" -FROM -"pg_catalog"."pg_class" AS "cls" + SELECT coalesce(json_agg(agg), '[]') + FROM ( + SELECT + "att"."attname" AS "name", + "typ"."typname" AS "datatype", + "tns"."nspname" AS "datatype_schema", + "fk_ns"."nspname" AS "foreign_key_schema", + "fk_cls"."relname" AS "foreign_key_table", + "fk_att"."attname" AS "foreign_key_column", + "fk_con"."conname" AS "foreign_key_name", + CASE "fk_con"."confupdtype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_update", + CASE "fk_con"."confdeltype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_delete", + "pk_con"."conkey" IS NOT NULL AS "pk", + ( + EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY ("u_con"."conkey") + ) + OR EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_index" AS "u_idx" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + ) + ) AS "unique", + "att"."attgenerated" != '' AS "computed", + pg_get_expr("def"."adbin", "def"."adrelid") AS "default", + "att"."attnotnull" != TRUE AS "nullable", + coalesce( + ( + SELECT json_agg("enm"."enumlabel") AS "o" + FROM "pg_catalog"."pg_enum" AS "enm" + WHERE "enm"."enumtypid" = "typ"."oid" + ), + '[]' + ) AS "options" + FROM "pg_catalog"."pg_attribute" AS "att" + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("pk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' + AND "fk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("fk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" + LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" + LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" + AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND 
"def"."adnum" = "att"."attnum" + WHERE + "att"."attrelid" = "cls"."oid" + AND "att"."attnum" >= 0 + AND "att"."attisdropped" != TRUE + ORDER BY "att"."attnum" + ) AS agg + ) AS "columns" +FROM "pg_catalog"."pg_class" AS "cls" INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" WHERE -"ns"."nspname" !~ '^pg_' -AND "ns"."nspname" != 'information_schema' -AND "cls"."relkind" IN ('r', 'v') -AND "cls"."relname" !~ '^pg_' -AND "cls"."relname" !~ '_prisma_migrations' + "ns"."nspname" !~ '^pg_' + AND "ns"."nspname" != 'information_schema' + AND "cls"."relkind" IN ('r', 'v') + AND "cls"."relname" !~ '^pg_' + AND "cls"."relname" !~ '_prisma_migrations' ` diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index d8bd0928..b6f76b98 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -1,11 +1,14 @@ -import type { BuiltinType } from '@zenstackhq/language/ast' +import type { BuiltinType, DataFieldAttribute, Enum, InvocationExpr, LiteralExpr, ReferenceExpr } from '@zenstackhq/language/ast' +import type { AstNode } from '../../../../../language/dist/ast.cjs'; +import type { ZModelServices } from '@zenstackhq/language'; -export type Cascade = "NO ACTION" | "RESTRICT"| "CASCADE" | "SET NULL" | "SET DEFAULT" | null; +export type Cascade = "NO ACTION" | "RESTRICT" | "CASCADE" | "SET NULL" | "SET DEFAULT" | null; export interface IntrospectedTable { schema: string name: string type: 'table' | 'view' + definition: string | null columns: { name: string datatype: string @@ -21,6 +24,7 @@ export interface IntrospectedTable { nullable: boolean options: string[] unique: boolean + default: string | null }[] } @@ -41,4 +45,5 @@ export interface IntrospectionProvider { type: BuiltinType | 'Unsupported' isArray: boolean } + getDefaultValue(args: { fieldName: string, defaultValue: string, container: T, services: ZModelServices, enums: Enum[] }): LiteralExpr | InvocationExpr | DataFieldAttribute | DataFieldAttribute[] | ReferenceExpr | undefined } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 61883ef9..3feaa5ab 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -82,8 +82,8 @@ export const sqlite: IntrospectionProvider = { } // List user tables and views (exclude internal sqlite_*) - const tablesRaw = all<{ name: string; type: 'table' | 'view' }>( - "SELECT name, type FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" + const tablesRaw = all<{ name: string; type: 'table' | 'view'; definition: string | null }>( + "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" ) const tables: IntrospectedTable[] = [] @@ -173,12 +173,13 @@ export const sqlite: IntrospectionProvider = { pk: !!c.pk, computed: hidden === 2, nullable: c.notnull !== 1, + default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), }) } - tables.push({ schema, name: tableName, columns, type: t.type }) + tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition }) } const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums @@ -188,4 +189,8 @@ export const sqlite: IntrospectionProvider = { db.close() } }, + + getDefaultValue(_args) { + throw new Error('Not implemented yet for 
SQLite') + } } From ba26032472d6f172bdd6b5c98e1780140afe8979 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 00:56:27 +0200 Subject: [PATCH 07/29] feat: add ast factory --- packages/cli/src/actions/db.ts | 42 +- packages/cli/src/actions/pull/index.ts | 754 ++++++++---------- .../cli/src/actions/pull/provider/index.ts | 10 +- .../src/actions/pull/provider/postgresql.ts | 428 +++++----- .../cli/src/actions/pull/provider/provider.ts | 96 ++- .../cli/src/actions/pull/provider/sqlite.ts | 173 ++-- packages/cli/src/actions/pull/utils.ts | 133 +-- packages/language/package.json | 10 + packages/language/src/factory/attribute.ts | 275 +++++++ packages/language/src/factory/declaration.ts | 363 +++++++++ packages/language/src/factory/expression.ts | 303 +++++++ packages/language/src/factory/index.ts | 61 ++ packages/language/src/factory/primitives.ts | 61 ++ packages/language/tsup.config.ts | 1 + 14 files changed, 1858 insertions(+), 852 deletions(-) create mode 100644 packages/language/src/factory/attribute.ts create mode 100644 packages/language/src/factory/declaration.ts create mode 100644 packages/language/src/factory/expression.ts create mode 100644 packages/language/src/factory/index.ts create mode 100644 packages/language/src/factory/primitives.ts diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 61e05956..8dea5cd9 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -7,6 +7,7 @@ import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, require import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource } from './pull/utils'; +import { config } from '@dotenvx/dotenvx'; type PushOptions = { schema?: string; @@ -14,9 +15,11 @@ type PushOptions = { forceReset?: boolean; }; -type PullOptions = { +export type PullOptions = { schema?: string; out?: string; + naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + alwaysMap?: boolean; }; /** @@ -67,62 +70,57 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - await import("@dotenvx/dotenvx/config") - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] - const datasource = getDatasource(model) + config(); + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const datasource = getDatasource(model); if (!datasource) { - throw new Error('No datasource found in the schema.') + throw new Error('No datasource found in the schema.'); } if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { - throw new Error(`Unsupported datasource provider: ${datasource.provider}`) + throw new Error(`Unsupported datasource provider: ${datasource.provider}`); } const provider = providers[datasource.provider]; if (!provider) { - throw new Error( - `No introspection provider found for: ${datasource.provider}` - ) + throw new Error(`No introspection provider found for: ${datasource.provider}`); } - const { enums, tables } = await provider.introspect(datasource.url) + const { enums, tables } = await provider.introspect(datasource.url); const newModel: Model = { $type: 'Model', $container: undefined, $containerProperty: undefined, $containerIndex: undefined, - declarations: [...model.declarations.filter(d => ["DataSource"].includes(d.$type))], + declarations: [...model.declarations.filter((d) 
=> ['DataSource'].includes(d.$type))], imports: [], }; + syncEnums({ dbEnums: enums, model: newModel, services, options }); - syncEnums({ dbEnums: enums, model: newModel, services }) - - - - const resolvedRelations: Relation[] = [] + const resolvedRelations: Relation[] = []; for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services }) - resolvedRelations.push(...relations) + const relations = syncTable({ table, model: newModel, provider, services, options }); + resolvedRelations.push(...relations); } for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services }); + syncRelation({ model: newModel, relation, services, options }); } //TODO: diff models and apply changes only - const generator = await new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator(); - const zmodelSchema = await generator.generate(newModel) + const zmodelSchema = generator.generate(newModel); console.log(options.out ? `Writing to ${options.out}` : schemaFile); const outPath = options.out ? path.resolve(options.out) : schemaFile; console.log(outPath); - fs.writeFileSync(outPath, zmodelSchema) + fs.writeFileSync(outPath, zmodelSchema); } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 75225c95..708244a3 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,481 +1,371 @@ -import type { ZModelServices } from '@zenstackhq/language' -import type { - ArrayExpr, - AttributeArg, - DataField, - DataFieldAttribute, - DataFieldType, - DataModel, - Enum, - EnumField, - Model, - ReferenceExpr, - StringLiteral, - UnsupportedFieldType -} from '@zenstackhq/language/ast' -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName } from './utils' - -export function syncEnums({ dbEnums, model }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { +import type { ZModelServices } from '@zenstackhq/language'; +import { isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; +import { DataFieldFactory, DataModelFactory, EnumFactory } from '@zenstackhq/language/factory'; +import type { PullOptions } from '../db'; +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; +import { getAttributeRef, getDbName } from './utils'; + +export function syncEnums({ + dbEnums, + model, + options: options, + services, +}: { + dbEnums: IntrospectedEnum[]; + model: Model; + services: ZModelServices; + options: PullOptions; +}) { for (const dbEnum of dbEnums) { - const schemaEnum = { - $type: 'Enum' as const, - $container: model, - name: dbEnum.enum_type, - attributes: [], - comments: [], - get fields() { - return dbEnum.values.map((v): EnumField => ({ - $type: 'EnumField' as const, - $container: schemaEnum, - name: v, - attributes: [], - comments: [], - })); - } - } - model.declarations.push(schemaEnum) + const { modified, name } = resolveNameCasing(options, dbEnum.enum_type); + if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + const factory = new EnumFactory().setName(name); + if (modified) + factory.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@@map', services)!) 
+ .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), + ); + + dbEnum.values.map((v) => { + const { name, modified } = resolveNameCasing(options, v); + factory.addField((builder) => { + builder.setName(name); + if (modified) + builder.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@map', services)!) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), + ); + + return builder; + }); + }); + model.declarations.push(factory.get({ $container: model })); } } -export type Relation = { - schema: string - table: string - column: string - type: 'one' | 'many' - fk_name: string - nullable: boolean - references: { - schema: string | null - table: string | null - column: string | null - type: 'one' | 'many' +function resolveNameCasing(options: PullOptions, originalName: string) { + let name: string; + + switch (options.naming) { + case 'pascal': + name = toPascalCase(originalName); + break; + case 'camel': + name = toCamelCase(originalName); + break; + case 'snake': + name = toSnakeCase(originalName); + break; + case 'kebab': + name = toKebabCase(originalName); + break; + case 'none': + default: + name = originalName; + break; } + + return { + modified: options.alwaysMap ? true : name !== originalName, + name, + }; } +function toPascalCase(str: string): string { + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase()); +} + +function toCamelCase(str: string): string { + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase()); +} + +function toSnakeCase(str: string): string { + return str + .replace(/[- ]+/g, '_') + .replace(/([a-z0-9])([A-Z])/g, '$1_$2') + .toLowerCase(); +} + +function toKebabCase(str: string): string { + return str + .replace(/[_ ]+/g, '-') + .replace(/([a-z0-9])([A-Z])/g, '$1-$2') + .toLowerCase(); +} + +export type Relation = { + schema: string; + table: string; + column: string; + type: 'one' | 'many'; + fk_name: string; + nullable: boolean; + references: { + schema: string | null; + table: string | null; + column: string | null; + type: 'one' | 'many'; + }; +}; + export function syncTable({ model, provider, table, - services + services, + options, }: { - table: IntrospectedTable - model: Model - provider: IntrospectionProvider - services: ZModelServices + table: IntrospectedTable; + model: Model; + provider: IntrospectionProvider; + services: ZModelServices; + options: PullOptions; }) { - const idAttribute = getAttributeRef('@id', services) - const uniqueAttribute = getAttributeRef('@unique', services) - const modelUniqueAttribute = getAttributeRef('@@unique', services) - const relationAttribute = getAttributeRef('@relation', services) - const fieldMapAttribute = getAttributeRef('@map', services) - const tableMapAttribute = getAttributeRef('@@map', services) + const idAttribute = getAttributeRef('@id', services); + const modelIdAttribute = getAttributeRef('@@id', services); + const uniqueAttribute = getAttributeRef('@unique', services); + const modelUniqueAttribute = getAttributeRef('@@unique', services); + const relationAttribute = getAttributeRef('@relation', services); + const fieldMapAttribute = getAttributeRef('@map', services); + const tableMapAttribute = getAttributeRef('@@map', services); + const modelindexAttribute = getAttributeRef('@@index', services); - if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { - throw new Error('Cannot find required attributes in the model.') 
+ if ( + !idAttribute || + !uniqueAttribute || + !relationAttribute || + !fieldMapAttribute || + !tableMapAttribute || + !modelIdAttribute || + !modelUniqueAttribute || + !modelindexAttribute + ) { + throw new Error('Cannot find required attributes in the model.'); } - const relations: Relation[] = [] - const modelTable: DataModel = { - $type: 'DataModel' as const, - $container: model, - name: table.name, - fields: [], - attributes: [], - comments: [], - isView: false, - mixins: [], + const relations: Relation[] = []; + const { name, modified } = resolveNameCasing({ ...options, naming: 'pascal' }, table.name); + const multiPk = table.columns.filter((c) => c.pk).length > 1; + + const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); + modelFactory.setContainer(model); + if (modified) { + modelFactory.addAttribute((builder) => + builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), + ); } - model.declarations.push(modelTable) - modelTable.fields = table.columns.map((col) => { - if (col.default) console.log(`${table.name}.${col.name} -> ${col.default}`); + if (multiPk) { + const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); + modelFactory.addAttribute((builder) => + builder.setDecl(modelIdAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + pkColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); + } - if (col.foreign_key_table) { + table.columns.forEach((column) => { + if (column.foreign_key_table) { relations.push({ schema: table.schema, table: table.name, - column: col.name, + column: column.name, type: 'one', - fk_name: col.foreign_key_name!, - nullable: col.nullable, + fk_name: column.foreign_key_name!, + nullable: column.nullable, references: { - schema: col.foreign_key_schema, - table: col.foreign_key_table, - column: col.foreign_key_column, - type: col.unique ? 'one' : 'many', + schema: column.foreign_key_schema, + table: column.foreign_key_table, + column: column.foreign_key_column, + type: column.unique ? 'one' : 'many', }, - }) + }); } - const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' - const fieldName = `${fieldPrefix}${col.name}` - - const builtinType = provider.getBuiltinType(col.datatype) - const field: DataField = { - $type: 'DataField' as const, - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - get unsupported() { - return builtinType.type === 'Unsupported' ? { - $container: this, - $type: 'UnsupportedFieldType' as const, - get value() { - return { - $container: this, - $type: 'StringLiteral', - value: col.datatype, - } satisfies StringLiteral - }, - } satisfies UnsupportedFieldType : undefined - }, - optional: col.nullable, - reference: col.options.length - ? 
{ - $refText: col.datatype, - ref: model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } satisfies DataFieldType - }, - $container: modelTable!, - name: fieldName, - get attributes() { - if (fieldPrefix !== '') return [] - - const getDefaultAttrs = () => { - if (!col.default) return []; - - const defaultValue = col.default && provider.getDefaultValue({ - fieldName: col.name, - defaultValue: col.default, - container: this, - services, - enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], - }) - - if (!defaultValue) return []; - - if (Array.isArray(defaultValue)) { - return defaultValue; - } + const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; + const { name: _name, modified } = resolveNameCasing(options, column.name); + const name = `${fieldPrefix}${_name}`; - if (defaultValue?.$type === 'DataFieldAttribute') { - return [defaultValue]; - } + const builtinType = provider.getBuiltinType(column.datatype); + + modelFactory.addField((builder) => { + builder.setName(name); + builder.setType((typeBuilder) => { + typeBuilder.setArray(builtinType.isArray); + typeBuilder.setOptional(column.nullable); - return [{ - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: 'default', - ref: getAttributeRef('@default', services) - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { ...defaultValue, $container: this } - }, - }] satisfies AttributeArg[] - }, - } satisfies DataFieldAttribute]; + if (builtinType.type != 'Unsupported') { + typeBuilder.setType(builtinType.type); + } else { + typeBuilder.setUnsupported((unsupportedBuilder) => + unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), + ); } - return [ - ...(col.pk ? [{ - $type: 'DataFieldAttribute' as const, - $container: this, - args: [], - decl: { - $refText: '@id', - ref: idAttribute, - }, - }] : []) satisfies DataFieldAttribute[], - ...getDefaultAttrs(), - { - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@map', - ref: fieldMapAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: col.name, - } - }, - } - ] satisfies AttributeArg[] - }, + if (column.options.length > 0) { + const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype) as + | Enum + | undefined; + + if (ref) { + typeBuilder.setReference(ref); } - ] satisfies DataFieldAttribute[] - }, - comments: [], - } - return field - }) + } + + return typeBuilder; + }); + + if (column.default) { + const defaultValuesAttrs = column.default + ? 
provider.getDefaultValue({ + fieldName: column.name, + defaultValue: column.default, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }) + : []; + defaultValuesAttrs.forEach(builder.addAttribute); + } + + if (column.pk && !multiPk) { + builder.addAttribute((b) => b.setDecl(idAttribute)); + } - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name) + if (column.unique) + builder.addAttribute((b) => { + b.setDecl(uniqueAttribute); + if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + + return b; + }); + if (modified) + builder.addAttribute((ab) => + ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name), 'name'), + ); + + return builder; + }); + }); + + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); if (uniqieColumns.length > 0) { - modelTable.attributes.push({ - $type: 'DataModelAttribute' as const, - $container: modelTable, - decl: { - $refText: '@unique', - ref: modelUniqueAttribute, - }, - get args() { - return uniqieColumns.map((c) => ({ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - $refText: c, - ref: modelTable.fields.find((f) => f.name === c), - }, - args: [], - }] satisfies ReferenceExpr[] - } - } as ArrayExpr - }, - })) satisfies AttributeArg[] - }, - }) - - return relations + modelFactory.addAttribute((builder) => + builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + uniqieColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); } - return relations + model.declarations.push(modelFactory.node); + + table.indexes.forEach((index) => { + modelFactory.addAttribute((builder) => + builder.setDecl(modelindexAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + index.columns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); + }); + + return relations; } -export function syncRelation({ model, relation, services }: { model: Model, relation: Relation, services: ZModelServices }) { - const idAttribute = getAttributeRef('@id', services) - const uniqueAttribute = getAttributeRef('@unique', services) - const relationAttribute = getAttributeRef('@relation', services) - const fieldMapAttribute = getAttributeRef('@map', services) - const tableMapAttribute = getAttributeRef('@@map', services) +export function syncRelation({ + model, + relation, + services, +}: { + model: Model; + relation: Relation; + services: ZModelServices; + options: PullOptions; +}) { + const idAttribute = getAttributeRef('@id', services); + const uniqueAttribute = getAttributeRef('@unique', services); + const relationAttribute = getAttributeRef('@relation', services); + const fieldMapAttribute = getAttributeRef('@map', services); + const tableMapAttribute = getAttributeRef('@@map', services); if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { - throw new Error('Cannot find 
required attributes in the model.') + throw new Error('Cannot find required attributes in the model.'); } - const sourceModel = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === relation.table - ) as DataModel | undefined - if (!sourceModel) return + const sourceModel = model.declarations.find((d) => d.$type === 'DataModel' && getDbName(d) === relation.table) as + | DataModel + | undefined; + if (!sourceModel) return; - const sourceField = sourceModel.fields.find( - (f) => getDbName(f) === relation.column - ) as DataField | undefined - if (!sourceField) return + const sourceField = sourceModel.fields.find((f) => getDbName(f) === relation.column) as DataField | undefined; + if (!sourceField) return; const targetModel = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table - ) as DataModel | undefined - if (!targetModel) return + (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table, + ) as DataModel | undefined; + if (!targetModel) return; - const targetField = targetModel.fields.find( - (f) => getDbName(f) === relation.references.column - ) - if (!targetField) return + const targetField = targetModel.fields.find((f) => getDbName(f) === relation.references.column); + if (!targetField) return; //TODO: Finish relation sync - const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : '' - - sourceModel.fields.push({ - $type: 'DataField' as const, - $container: sourceModel, - name: `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, - comments: [], - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - reference: { - ref: targetModel, - $refText: targetModel.name, - }, - optional: relation.nullable, - //TODO - array: relation.type === 'many', - } satisfies DataFieldType - }, - get attributes() { - return [{ - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@relation', - ref: relationAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }, - { - $type: 'AttributeArg' as const, - $container: this, - name: 'fields', - $resolvedParam: { - name: 'fields', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - ref: sourceField, - $refText: sourceField.name, - }, - args: [], - }] satisfies ReferenceExpr[] - }, - } satisfies ArrayExpr - }, - }, { - $type: 'AttributeArg' as const, - $container: this, - name: 'references', - $resolvedParam: { - name: 'references', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - ref: targetField, - $refText: targetField.name, - }, - args: [], - }] satisfies ReferenceExpr[] - }, - } satisfies ArrayExpr - }, - }, { - $type: 'AttributeArg' as const, - $container: this, - name: 'map', - $resolvedParam: { - name: 'map', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }] satisfies AttributeArg[] - }, - }] satisfies DataFieldAttribute[] - }, - }) - - const 
oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : '' - const oppositeFieldName = relation.type === 'one' - ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` - : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - - targetModel.fields.push({ - $type: 'DataField' as const, - $container: targetModel, - name: oppositeFieldName, - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - reference: { - ref: sourceModel, - $refText: sourceModel.name, - }, - optional: relation.references.type === 'one' && relation.nullable, - array: relation.references.type === 'many', - } satisfies DataFieldType - }, - get attributes() { - return [ - { - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@relation', - ref: relationAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }] satisfies AttributeArg[] - } - } - ] satisfies DataFieldAttribute[] - }, - comments: [], - }) -} \ No newline at end of file + const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; + + const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + + const sourceFieldFactory = new DataFieldFactory() + .setContainer(sourceModel) + .setName( + `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, + ) + .setType((tb) => + tb + .setOptional(relation.nullable) + .setArray(relation.type === 'many') + .setReference(targetModel), + ) + .addAttribute((ab) => + ab + .setDecl(relationAttribute) + .addArg((ab) => ab.StringLiteral.setValue(relationName)) + .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') + .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') + .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.StringLiteral.setValue(relation.fk_name)), 'map'), + ); + + sourceModel.fields.push(sourceFieldFactory.node); + + const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; + const oppositeFieldName = + relation.type === 'one' + ? 
`${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` + : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + + const targetFieldFactory = new DataFieldFactory() + .setContainer(targetModel) + .setName(oppositeFieldName) + .setType((tb) => + tb + .setOptional(relation.references.type === 'one') + .setArray(relation.references.type === 'many') + .setReference(sourceModel), + ) + .addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName))); + + targetModel.fields.push(targetFieldFactory.node); +} diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index 82ee2ac3..4c9a0fe8 100644 --- a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,9 +1,9 @@ -export * from './provider' +export * from './provider'; -import { postgresql } from "./postgresql"; -import { sqlite } from "./sqlite"; +import { postgresql } from './postgresql'; +import { sqlite } from './sqlite'; export const providers = { postgresql, - sqlite -}; \ No newline at end of file + sqlite, +}; diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index be882be6..07dcee91 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,242 +1,176 @@ -import { AttributeArg, DataFieldAttribute, Expression, FunctionDecl, InvocationExpr } from '@zenstackhq/language/ast' -import { Client } from 'pg' -import { getAttributeRef, getDbName } from '../utils' -import type { - IntrospectedEnum, - IntrospectedSchema, - IntrospectedTable, - IntrospectionProvider, -} from './provider' +import { Client } from 'pg'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; export const postgresql: IntrospectionProvider = { - getBuiltinType(type) { - const t = (type || '').toLowerCase() + getBuiltinType(type) { + const t = (type || '').toLowerCase(); - const isArray = t.startsWith('_') + const isArray = t.startsWith('_'); - switch (t.replace(/^_/, '')) { - // integers - case 'int2': - case 'smallint': - case 'int4': - case 'integer': - return { type: 'Int', isArray } - case 'int8': - case 'bigint': - return { type: 'BigInt', isArray } + switch (t.replace(/^_/, '')) { + // integers + case 'int2': + case 'smallint': + case 'int4': + case 'integer': + return { type: 'Int', isArray }; + case 'int8': + case 'bigint': + return { type: 'BigInt', isArray }; - // decimals and floats - case 'numeric': - case 'decimal': - return { type: 'Decimal', isArray } - case 'float4': - case 'real': - case 'float8': - case 'double precision': - return { type: 'Float', isArray } + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray }; + case 'float4': + case 'real': + case 'float8': + case 'double precision': + return { type: 'Float', isArray }; - // boolean - case 'bool': - case 'boolean': - return { type: 'Boolean', isArray } + // boolean + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray }; - // strings - case 'text': - case 'varchar': - case 'bpchar': - case 'character varying': - case 
'character': - return { type: 'String', isArray } + // strings + case 'text': + case 'varchar': + case 'bpchar': + case 'character varying': + case 'character': + return { type: 'String', isArray }; - // uuid - case 'uuid': - return { type: 'String', isArray } + // uuid + case 'uuid': + return { type: 'String', isArray }; - // dates/times - case 'date': - case 'timestamp': - case 'timestamptz': - return { type: 'DateTime', isArray } + // dates/times + case 'date': + case 'timestamp': + case 'timestamptz': + return { type: 'DateTime', isArray }; - // binary - case 'bytea': - return { type: 'Bytes', isArray } + // binary + case 'bytea': + return { type: 'Bytes', isArray }; - // json - case 'json': - case 'jsonb': - return { type: 'Json', isArray } + // json + case 'json': + case 'jsonb': + return { type: 'Json', isArray }; - // unsupported or postgres-specific - case 'time': - case 'timetz': - case 'interval': - case 'money': - case 'xml': - case 'bit': - case 'varbit': - case 'cidr': - case 'inet': - case 'macaddr': - case 'macaddr8': - case 'point': - case 'line': - case 'lseg': - case 'box': - case 'path': - case 'polygon': - case 'circle': - case 'tsvector': - case 'tsquery': - case 'jsonpath': - case 'hstore': - case 'oid': - case 'name': - case 'regclass': - case 'regproc': - case 'regprocedure': - case 'regoper': - case 'regoperator': - case 'regtype': - case 'regconfig': - case 'regdictionary': - case 'pg_lsn': - case 'txid_snapshot': - case 'int4range': - case 'int8range': - case 'numrange': - case 'tsrange': - case 'tstzrange': - case 'daterange': - default: - return { type: 'Unsupported' as const, isArray } - } - }, - async introspect(connectionString: string): Promise { - const client = new Client({ connectionString }) - await client.connect() + // unsupported or postgres-specific + case 'time': + case 'timetz': + case 'interval': + case 'money': + case 'xml': + case 'bit': + case 'varbit': + case 'cidr': + case 'inet': + case 'macaddr': + case 'macaddr8': + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + case 'tsvector': + case 'tsquery': + case 'jsonpath': + case 'hstore': + case 'oid': + case 'name': + case 'regclass': + case 'regproc': + case 'regprocedure': + case 'regoper': + case 'regoperator': + case 'regtype': + case 'regconfig': + case 'regdictionary': + case 'pg_lsn': + case 'txid_snapshot': + case 'int4range': + case 'int8range': + case 'numrange': + case 'tsrange': + case 'tstzrange': + case 'daterange': + default: + return { type: 'Unsupported' as const, isArray }; + } + }, + async introspect(connectionString: string): Promise { + const client = new Client({ connectionString }); + await client.connect(); - const { rows: tables } = await client.query( - tableIntrospectionQuery - ) - const { rows: enums } = await client.query( - enumIntrospectionQuery - ) + const { rows: tables } = await client.query(tableIntrospectionQuery); + const { rows: enums } = await client.query(enumIntrospectionQuery); - return { - enums, - tables, - } - }, - getDefaultValue({ defaultValue, container: $container, fieldName, services, enums }) { - // Handle common cases - console.log(defaultValue); + return { + enums, + tables, + }; + }, + getDefaultValue({ defaultValue, fieldName, services, enums }) { + const val = defaultValue.trim(); + const factories: DataFieldAttributeFactory[] = []; - const val = defaultValue.trim() + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)!); - if (val 
=== 'CURRENT_TIMESTAMP' || val === 'now()') { - const attrs: DataFieldAttribute[] = []; + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)!))); - attrs.push({ - $type: "DataFieldAttribute" as const, - $container: $container as any, - decl: { - $refText: '@default', - ref: getAttributeRef('@default', services) - }, - get args(): AttributeArg[] { - return [{ - $type: 'AttributeArg' as const, - $container: this as any, - get value(): Expression { - return { - $type: 'InvocationExpr' as const, - $container: this, - function: { - $refText: 'now', - ref: services.shared.workspace.IndexManager.allElements(FunctionDecl).find((f) => (f.node as FunctionDecl)?.name === 'now')?.node as FunctionDecl - }, - args: [], - } satisfies InvocationExpr + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services)!)); } - }] + return factories; } - }); - - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - // for updatedAt, use @updatedAt attribute - attrs.push({ - $type: "DataFieldAttribute" as const, - $container: $container as any, - decl: { - $refText: 'updatedAt', - ref: getAttributeRef('@updatedAt', services) - }, - args: [], - }); - } - - return attrs.length === 1 ? attrs[0] : attrs; - } - if (val.includes('::')) { - const [enumValue, enumName] = val.replace(/'|"/g, '').split('::').map((s) => s.trim()) as [string, string] - const enumDef = enums.find((e) => getDbName(e) === enumName) - if (!enumDef) { - throw new Error(`Enum type ${enumName} not found for default value ${defaultValue}`) - } - const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue) - if (!enumField) { - throw new Error(`Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`) - } - - return { - $type: 'ReferenceExpr' as const, - $container: $container as any, - target: { - $refText: enumField!.name, - ref: enumField, - }, - args: [], - } - } + if (val.includes('::')) { + const [enumValue, enumName] = val + .replace(/'|"/g, '') + .split('::') + .map((s) => s.trim()) as [string, string]; + const enumDef = enums.find((e) => getDbName(e) === enumName); + if (!enumDef) { + return []; + } + const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue); + if (!enumField) { + throw new Error( + `Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`, + ); + } - if (val === 'true' || val === 'false') { - return { - $type: 'BooleanLiteral' as const, - $container: $container as any, - value: val === 'true', - } - } + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + return factories; + } - if (/^\d+$/.test(val)) { - return { - $container: $container as any, - $type: 'NumberLiteral' as const, - value: val, - } - } + if (val === 'true' || val === 'false') { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(val === 'true'))); + return factories; + } - if (/^-?\d+(\.\d+)?$/.test(val)) { - // float - return { - $container: $container as any, - $type: 'NumberLiteral' as const, - value: val, - } - } + if (/^\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + return factories; + } - if (val.startsWith("'") && val.endsWith("'")) { - // string - return { - 
$container: $container as any, - $type: 'StringLiteral' as const, - value: val.slice(1, -1).replace(/''/g, "'"), - } - } - return undefined - }, -} + if (val.startsWith("'") && val.endsWith("'")) { + factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")))); + return factories; + } + return []; + }, +}; const enumIntrospectionQuery = ` SELECT @@ -247,7 +181,7 @@ FROM pg_type t JOIN pg_enum e ON t.oid = e.enumtypid JOIN pg_namespace n ON n.oid = t.typnamespace GROUP BY schema_name, enum_type -ORDER BY schema_name, enum_type;` +ORDER BY schema_name, enum_type;`; const tableIntrospectionQuery = ` SELECT @@ -308,6 +242,29 @@ SELECT AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) ) ) AS "unique", + ( + SELECT COALESCE( + ( + SELECT "u_con"."conname" + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY ("u_con"."conkey") + LIMIT 1 + ), + ( + SELECT "u_idx_cls"."relname" + FROM "pg_catalog"."pg_index" AS "u_idx" + JOIN "pg_catalog"."pg_class" AS "u_idx_cls" ON "u_idx"."indexrelid" = "u_idx_cls"."oid" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + LIMIT 1 + ) + ) + ) AS "unique_name", "att"."attgenerated" != '' AS "computed", pg_get_expr("def"."adbin", "def"."adrelid") AS "default", "att"."attnotnull" != TRUE AS "nullable", @@ -339,7 +296,41 @@ SELECT AND "att"."attisdropped" != TRUE ORDER BY "att"."attnum" ) AS agg - ) AS "columns" + ) AS "columns", + ( + SELECT coalesce(json_agg(agg), '[]') + FROM ( + SELECT + "idx_cls"."relname" AS "name", + "am"."amname" AS "method", + "idx"."indisunique" AS "unique", + "idx"."indisprimary" AS "primary", + "idx"."indisvalid" AS "valid", + "idx"."indisready" AS "ready", + ("idx"."indpred" IS NOT NULL) AS "partial", + pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", + ( + SELECT json_agg( + json_build_object( + 'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)), + 'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END, + 'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END, + 'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END + ) + ORDER BY "s"."i" + ) + FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i") + LEFT JOIN "pg_catalog"."pg_attribute" AS "att" + ON "att"."attrelid" = "cls"."oid" + AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"] + ) AS "columns" + FROM "pg_catalog"."pg_index" AS "idx" + JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" + JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" + WHERE "idx"."indrelid" = "cls"."oid" + ORDER BY "idx_cls"."relname" + ) AS agg + ) AS "indexes" FROM "pg_catalog"."pg_class" AS "cls" INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" WHERE @@ -348,4 +339,5 @@ WHERE AND "cls"."relkind" IN ('r', 'v') AND "cls"."relname" !~ '^pg_' AND "cls"."relname" !~ '_prisma_migrations' -` + ORDER BY "ns"."nspname", "cls"."relname" ASC; +`; diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index b6f76b98..c03c39fc 100644 --- 
a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -1,49 +1,71 @@ -import type { BuiltinType, DataFieldAttribute, Enum, InvocationExpr, LiteralExpr, ReferenceExpr } from '@zenstackhq/language/ast' -import type { AstNode } from '../../../../../language/dist/ast.cjs'; import type { ZModelServices } from '@zenstackhq/language'; +import type { BuiltinType, Enum } from '@zenstackhq/language/ast'; +import type { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; -export type Cascade = "NO ACTION" | "RESTRICT" | "CASCADE" | "SET NULL" | "SET DEFAULT" | null; +export type Cascade = 'NO ACTION' | 'RESTRICT' | 'CASCADE' | 'SET NULL' | 'SET DEFAULT' | null; export interface IntrospectedTable { - schema: string - name: string - type: 'table' | 'view' - definition: string | null - columns: { - name: string - datatype: string - datatype_schema: string - foreign_key_schema: string | null - foreign_key_table: string | null - foreign_key_column: string | null - foreign_key_name: string | null - foreign_key_on_update: Cascade - foreign_key_on_delete: Cascade - pk: boolean - computed: boolean - nullable: boolean - options: string[] - unique: boolean - default: string | null - }[] + schema: string; + name: string; + type: 'table' | 'view'; + definition: string | null; + columns: { + name: string; + datatype: string; + datatype_schema: string; + foreign_key_schema: string | null; + foreign_key_table: string | null; + foreign_key_column: string | null; + foreign_key_name: string | null; + foreign_key_on_update: Cascade; + foreign_key_on_delete: Cascade; + pk: boolean; + computed: boolean; + nullable: boolean; + options: string[]; + unique: boolean; + unique_name: string | null; + default: string | null; + }[]; + indexes: { + name: string; + method: string | null; + unique: boolean; + primary: boolean; + valid: boolean; + ready: boolean; + partial: boolean; + predicate: string | null; + columns: { + name: string; + expression: string | null; + order: 'ASC' | 'DESC' | null; + nulls: string | null; + }[]; + }[]; } export type IntrospectedEnum = { - schema_name: string - enum_type: string - values: string[] -} + schema_name: string; + enum_type: string; + values: string[]; +}; export type IntrospectedSchema = { - tables: IntrospectedTable[] - enums: IntrospectedEnum[] -} + tables: IntrospectedTable[]; + enums: IntrospectedEnum[]; +}; export interface IntrospectionProvider { - introspect(connectionString: string): Promise - getBuiltinType(type: string): { - type: BuiltinType | 'Unsupported' - isArray: boolean - } - getDefaultValue(args: { fieldName: string, defaultValue: string, container: T, services: ZModelServices, enums: Enum[] }): LiteralExpr | InvocationExpr | DataFieldAttribute | DataFieldAttribute[] | ReferenceExpr | undefined + introspect(connectionString: string): Promise; + getBuiltinType(type: string): { + type: BuiltinType | 'Unsupported'; + isArray: boolean; + }; + getDefaultValue(args: { + fieldName: string; + defaultValue: string; + services: ZModelServices; + enums: Enum[]; + }): DataFieldAttributeFactory[]; } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 3feaa5ab..160a3096 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,14 +1,14 @@ -import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider' +import type { IntrospectedEnum, 
IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. export const sqlite: IntrospectionProvider = { getBuiltinType(type) { - const t = (type || '').toLowerCase().trim() + const t = (type || '').toLowerCase().trim(); // SQLite has no array types - const isArray = false + const isArray = false; switch (t) { // integers @@ -17,24 +17,24 @@ export const sqlite: IntrospectionProvider = { case 'tinyint': case 'smallint': case 'mediumint': - return { type: 'Int', isArray } + return { type: 'Int', isArray }; case 'bigint': - return { type: 'BigInt', isArray } + return { type: 'BigInt', isArray }; // decimals and floats case 'numeric': case 'decimal': - return { type: 'Decimal', isArray } + return { type: 'Decimal', isArray }; case 'real': case 'double': case 'double precision': case 'float': - return { type: 'Float', isArray } + return { type: 'Float', isArray }; // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) case 'bool': case 'boolean': - return { type: 'Boolean', isArray } + return { type: 'Boolean', isArray }; // strings case 'text': @@ -44,102 +44,128 @@ export const sqlite: IntrospectionProvider = { case 'character': case 'clob': case 'uuid': // often stored as TEXT - return { type: 'String', isArray } + return { type: 'String', isArray }; // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) case 'date': case 'datetime': - return { type: 'DateTime', isArray } + return { type: 'DateTime', isArray }; // binary case 'blob': - return { type: 'Bytes', isArray } + return { type: 'Bytes', isArray }; // json (not a native type, but commonly used) case 'json': - return { type: 'Json', isArray } + return { type: 'Json', isArray }; default: { // Fallbacks based on SQLite type affinity rules - if (t.includes('int')) return { type: 'Int', isArray } - if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray } - if (t.includes('blob')) return { type: 'Bytes', isArray } - if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray } - if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray } - return { type: 'Unsupported' as const, isArray } + if (t.includes('int')) return { type: 'Int', isArray }; + if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray }; + if (t.includes('blob')) return { type: 'Bytes', isArray }; + if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray }; + if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray }; + return { type: 'Unsupported' as const, isArray }; } } }, async introspect(connectionString: string): Promise { - const SQLite = (await import('better-sqlite3')).default - const db = new SQLite(connectionString, { readonly: true }) + const SQLite = (await import('better-sqlite3')).default; + const db = new SQLite(connectionString, { readonly: true }); try { const all = (sql: string): T[] => { - const stmt: any = db.prepare(sql) - return stmt.all() as T[] - } + const stmt: any = db.prepare(sql); + return stmt.all() as T[]; + }; // List user tables and views (exclude internal sqlite_*) const tablesRaw = all<{ name: string; type: 'table' | 'view'; definition: string | null }>( - "SELECT name, type, sql AS definition FROM 
sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" - ) + "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", + ); - const tables: IntrospectedTable[] = [] + const tables: IntrospectedTable[] = []; for (const t of tablesRaw) { - const tableName = t.name - const schema = 'main' + const tableName = t.name; + const schema = 'main'; // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) const columnsInfo = all<{ - cid: number - name: string - type: string - notnull: number - dflt_value: string | null - pk: number - hidden?: number - }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`) + cid: number; + name: string; + type: string; + notnull: number; + dflt_value: string | null; + pk: number; + hidden?: number; + }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`); + + // Index list (used for both unique inference and index collection) + const tableNameEsc = tableName.replace(/'/g, "''"); + const idxList = all<{ + seq: number; + name: string; + unique: number; + origin: string; + partial: number; + }>(`PRAGMA index_list('${tableNameEsc}')`); // Unique columns detection via unique indexes with single column - const uniqueIndexRows = all<{ name: string; unique: number }>( - `PRAGMA index_list('${tableName.replace(/'/g, "''")}')` - ).filter((r) => r.unique === 1) - - const uniqueSingleColumn = new Set() + const uniqueSingleColumn = new Set(); + const uniqueIndexRows = idxList.filter((r) => r.unique === 1); for (const idx of uniqueIndexRows) { - const idxCols = all<{ name: string }>( - `PRAGMA index_info('${idx.name.replace(/'/g, "''")}')` - ) + const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); if (idxCols.length === 1 && idxCols[0]?.name) { - uniqueSingleColumn.add(idxCols[0].name) + uniqueSingleColumn.add(idxCols[0].name); } } + // Indexes details + const indexes: IntrospectedTable['indexes'] = idxList.map((idx) => { + const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); + return { + name: idx.name, + method: null, // SQLite does not expose index method + unique: idx.unique === 1, + primary: false, // SQLite does not expose this directly; handled via pk in columns + valid: true, // SQLite does not expose index validity + ready: true, // SQLite does not expose index readiness + partial: idx.partial === 1, + predicate: null, // SQLite does not expose index predicate + columns: idxCols.map((col) => ({ + name: col.name, + expression: null, + order: null, + nulls: null, + })), + }; + }); + // Foreign keys mapping by column name const fkRows = all<{ - id: number - seq: number - table: string - from: string - to: string | null - on_update: any - on_delete: any - }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`) + id: number; + seq: number; + table: string; + from: string; + to: string | null; + on_update: any; + on_delete: any; + }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`); const fkByColumn = new Map< string, { - foreign_key_schema: string | null - foreign_key_table: string | null - foreign_key_column: string | null - foreign_key_name: string | null - foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update'] - foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete'] + foreign_key_schema: string | null; + foreign_key_table: string | null; + 
foreign_key_column: string | null; + foreign_key_name: string | null; + foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update']; + foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete']; } - >() + >(); for (const fk of fkRows) { fkByColumn.set(fk.from, { @@ -149,16 +175,16 @@ export const sqlite: IntrospectionProvider = { foreign_key_name: null, foreign_key_on_update: (fk.on_update as any) ?? null, foreign_key_on_delete: (fk.on_delete as any) ?? null, - }) + }); } - const columns: IntrospectedTable['columns'] = [] + const columns: IntrospectedTable['columns'] = []; for (const c of columnsInfo) { // hidden: 1 (hidden/internal) -> skip; 2 (generated) -> mark computed - const hidden = c.hidden ?? 0 - if (hidden === 1) continue + const hidden = c.hidden ?? 0; + if (hidden === 1) continue; - const fk = fkByColumn.get(c.name) + const fk = fkByColumn.get(c.name); columns.push({ name: c.name, @@ -176,21 +202,22 @@ export const sqlite: IntrospectionProvider = { default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), - }) + unique_name: uniqueSingleColumn.has(c.name) ? `${tableName}_${c.name}_unique` : null, + }); } - tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition }) + tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition, indexes }); } - const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums + const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums - return { tables, enums } + return { tables, enums }; } finally { - db.close() + db.close(); } }, getDefaultValue(_args) { - throw new Error('Not implemented yet for SQLite') - } -} + throw new Error('Not implemented yet for SQLite'); + }, +}; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index defd0f30..23462974 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,92 +1,95 @@ -import type { ZModelServices } from '@zenstackhq/language' +import type { ZModelServices } from '@zenstackhq/language'; import { - AbstractDeclaration, - DataField, - DataModel, - Enum, - EnumField, - isInvocationExpr, - type Attribute, - type Model -} from '@zenstackhq/language/ast' -import { getStringLiteral } from '@zenstackhq/language/utils' -import type { - DataSourceProviderType -} from '@zenstackhq/sdk/schema' -import type { Reference } from 'langium' + AbstractDeclaration, + DataField, + DataModel, + Enum, + EnumField, + FunctionDecl, + isInvocationExpr, + type Attribute, + type Model, +} from '@zenstackhq/language/ast'; +import { getStringLiteral } from '@zenstackhq/language/utils'; +import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; +import type { Reference } from 'langium'; export function getAttribute(model: Model, attrName: string) { - const references = model.$document! 
- .references as Reference[] - return references.find( - (a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName - )?.ref as Attribute | undefined + const references = model.$document!.references as Reference[]; + return references.find((a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName)?.ref as + | Attribute + | undefined; } export function getDatasource(model: Model) { - const datasource = model.declarations.find((d) => d.$type === 'DataSource') - if (!datasource) { - throw new Error('No datasource declaration found in the schema.') - } + const datasource = model.declarations.find((d) => d.$type === 'DataSource'); + if (!datasource) { + throw new Error('No datasource declaration found in the schema.'); + } - const urlField = datasource.fields.find((f) => f.name === 'url')! + const urlField = datasource.fields.find((f) => f.name === 'url')!; - let url = getStringLiteral(urlField.value) + let url = getStringLiteral(urlField.value); - if (!url && isInvocationExpr(urlField.value)) { - const envName = getStringLiteral(urlField.value.args[0]?.value) - if (!envName) { - throw new Error('The url field must be a string literal or an env().') - } - if (!process.env[envName]) { - throw new Error( - `Environment variable ${envName} is not set, please set it to the database connection string.` - ) + if (!url && isInvocationExpr(urlField.value)) { + const envName = getStringLiteral(urlField.value.args[0]?.value); + if (!envName) { + throw new Error('The url field must be a string literal or an env().'); + } + if (!process.env[envName]) { + throw new Error( + `Environment variable ${envName} is not set, please set it to the database connection string.`, + ); + } + url = process.env[envName]; } - url = process.env[envName] - } - if (!url) { - throw new Error('The url field must be a string literal or an env().') - } + if (!url) { + throw new Error('The url field must be a string literal or an env().'); + } - return { - name: datasource.name, - provider: getStringLiteral( - datasource.fields.find((f) => f.name === 'provider')?.value - ) as DataSourceProviderType, - url, - } + return { + name: datasource.name, + provider: getStringLiteral( + datasource.fields.find((f) => f.name === 'provider')?.value, + ) as DataSourceProviderType, + url, + }; } -export function getDbName( - decl: AbstractDeclaration | DataField | EnumField -): string { - if (!('attributes' in decl)) return decl.name - const nameAttr = decl.attributes.find( - (a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map' - ) - if (!nameAttr) return decl.name - const attrValue = nameAttr.args[0]?.value +export function getDbName(decl: AbstractDeclaration | DataField | EnumField): string { + if (!('attributes' in decl)) return decl.name; + const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map'); + if (!nameAttr) return decl.name; + const attrValue = nameAttr.args[0]?.value; - if (attrValue?.$type !== 'StringLiteral') return decl.name + if (attrValue?.$type !== 'StringLiteral') return decl.name; - return attrValue.value + return attrValue.value; } - -export function getDeclarationRef(type: T["$type"], name: string, services: ZModelServices) { - return services.shared.workspace.IndexManager.allElements(type).find((m) => m.node && getDbName(m.node as T) === name)?.node as T | undefined +export function getDeclarationRef( + type: T['$type'], + name: string, + services: ZModelServices, +) { + return services.shared.workspace.IndexManager.allElements(type).find( + (m) => 
m.node && getDbName(m.node as T) === name, + )?.node as T | undefined; } export function getEnumRef(name: string, services: ZModelServices) { - return getDeclarationRef('Enum', name, services); + return getDeclarationRef('Enum', name, services); } export function getModelRef(name: string, services: ZModelServices) { - return getDeclarationRef('DataModel', name, services); + return getDeclarationRef('DataModel', name, services); } export function getAttributeRef(name: string, services: ZModelServices) { - return getDeclarationRef('Attribute', name, services); -} \ No newline at end of file + return getDeclarationRef('Attribute', name, services); +} + +export function getFunctionRef(name: string, services: ZModelServices) { + return getDeclarationRef('FunctionDecl', name, services); +} diff --git a/packages/language/package.json b/packages/language/package.json index cd7c4d81..c9b92dce 100644 --- a/packages/language/package.json +++ b/packages/language/package.json @@ -49,6 +49,16 @@ "default": "./dist/utils.cjs" } }, + "./factory": { + "import": { + "types": "./dist/factory.d.ts", + "default": "./dist/factory.js" + }, + "require": { + "types": "./dist/factory.d.cts", + "default": "./dist/factory.cjs" + } + }, "./package.json": { "import": "./package.json", "require": "./package.json" diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts new file mode 100644 index 00000000..a42c5e50 --- /dev/null +++ b/packages/language/src/factory/attribute.ts @@ -0,0 +1,275 @@ +import { AstFactory } from '.'; +import { + Attribute, + AttributeArg, + AttributeParam, + AttributeParamType, + DataFieldAttribute, + DataModelAttribute, + Expression, + InternalAttribute, + TypeDeclaration, + type Reference, + type RegularID, +} from '../ast'; +import { ExpressionBuilder } from './expression'; + +export class DataFieldAttributeFactory extends AstFactory { + args: AttributeArgFactory[] = []; + decl?: Reference; + constructor() { + super({ type: DataFieldAttribute }); + } + setDecl(decl: Attribute) { + this.decl = { + $refText: decl?.name ?? '', + ref: decl!, + }; + this.update({ + decl: this.decl, + }); + return this; + } + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class DataModelAttributeFactory extends AstFactory { + args: AttributeArgFactory[] = []; + decl?: Reference; + constructor() { + super({ type: DataModelAttribute }); + } + setDecl(decl: Attribute) { + this.decl = { + $refText: decl?.name ?? 
'', + ref: decl!, + }; + this.update({ + decl: this.decl, + }); + return this; + } + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class AttributeArgFactory extends AstFactory { + name?: RegularID = ''; + value?: AstFactory; + + constructor() { + super({ type: AttributeArg }); + } + + setName(name: RegularID) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setValue(builder: (b: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value, + }); + return this; + } +} + +export class InternalAttributeFactory extends AstFactory { + decl?: Reference; + args: AttributeArgFactory[] = []; + + constructor() { + super({ type: InternalAttribute }); + } + + setDecl(decl: Attribute) { + this.decl = { + $refText: decl.name, + ref: decl, + }; + this.update({ + decl: this.decl, + }); + return this; + } + + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class AttributeParamFactory extends AstFactory { + attributes: InternalAttributeFactory[] = []; + comments: string[] = []; + default?: boolean; + name?: RegularID; + type?: AttributeParamTypeFactory; + + constructor() { + super({ + type: AttributeParam, + node: { + comments: [], + attributes: [], + }, + }); + } + + addAttribute(builder: (b: InternalAttributeFactory) => InternalAttributeFactory) { + this.attributes.push(builder(new InternalAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + setDefault(defaultValue: boolean) { + this.default = defaultValue; + this.update({ + default: this.default, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setType(builder: (b: AttributeParamTypeFactory) => AttributeParamTypeFactory) { + this.type = builder(new AttributeParamTypeFactory()); + this.update({ + type: this.type, + }); + return this; + } +} + +export class AttributeParamTypeFactory extends AstFactory { + array?: boolean; + optional?: boolean; + reference?: Reference; + type?: AttributeParamType['type']; + constructor() { + super({ type: AttributeParamType }); + } + setArray(array: boolean) { + this.array = array; + this.update({ + array: this.array, + }); + return this; + } + + setOptional(optional: boolean) { + this.optional = optional; + this.update({ + optional: this.optional, + }); + return this; + } + + setReference(reference: TypeDeclaration) { + this.reference = { + $refText: reference.name, + ref: reference, + }; + this.update({ + reference: this.reference, + }); + return this; + } + + setType(type: AttributeParamType['type']) { + this.type = type; + this.update({ + type: this.type, + }); + return this; + } +} + +export class AttributeFactory extends AstFactory { + name?: string; + comments: string[] = []; + attributes: InternalAttributeFactory[] = []; + params: AttributeParamFactory[] = []; + + constructor() { + super({ type: Attribute, node: 
{ comments: [], attributes: [], params: [] } }); + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + addAttribute(builder: (b: InternalAttributeFactory) => InternalAttributeFactory) { + this.attributes.push(builder(new InternalAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + addParam(builder: (b: AttributeParamFactory) => AttributeParamFactory) { + this.params.push(builder(new AttributeParamFactory())); + this.update({ + params: this.params, + }); + return this; + } +} diff --git a/packages/language/src/factory/declaration.ts b/packages/language/src/factory/declaration.ts new file mode 100644 index 00000000..1f514982 --- /dev/null +++ b/packages/language/src/factory/declaration.ts @@ -0,0 +1,363 @@ +import { AstFactory } from '.'; +import { AbstractDeclaration, type Reference } from '../ast'; +import { + type BuiltinType, + DataField, + DataFieldType, + DataModel, + Enum, + EnumField, + LiteralExpr, + Model, + ModelImport, + type RegularID, + type RegularIDWithTypeNames, + TypeDeclaration, + type TypeDef, + UnsupportedFieldType, +} from '../generated/ast'; +import { AttributeFactory, DataFieldAttributeFactory, DataModelAttributeFactory } from './attribute'; +import { ExpressionBuilder } from './expression'; +export const DeclarationBuilder = () => + ({ + get Attribute() { + return new AttributeFactory(); + }, + get DataModel() { + return new DataModelFactory(); + }, + get DataSource(): any { + throw new Error('DataSource is not implemented'); + }, + get Enum() { + return new EnumFactory(); + }, + get FunctionDecl(): any { + throw new Error('FunctionDecl is not implemented'); + }, + get GeneratorDecl(): any { + throw new Error('GeneratorDecl is not implemented'); + }, + get Plugin(): any { + throw new Error('Plugin is not implemented'); + }, + get Procedure(): any { + throw new Error('Procedure is not implemented'); + }, + get TypeDef(): any { + throw new Error('TypeDef is not implemented'); + }, + }) satisfies DeclarationBuilderType; +type DeclarationBuilderType = { + [K in T['$type']]: AstFactory>; +}; +type DeclarationBuilderMap = ReturnType; + +export type DeclarationBuilder = Pick< + DeclarationBuilderMap, + Extract +>; + +export class DataModelFactory extends AstFactory { + attributes: DataModelAttributeFactory[] = []; + baseModel?: Reference; + comments: string[] = []; + fields: DataFieldFactory[] = []; + isView?: boolean; + mixins: Reference[] = []; + name?: RegularID; + + constructor() { + super({ + type: DataModel, + node: { + attributes: [], + comments: [], + fields: [], + mixins: [], + }, + }); + } + + addAttribute(builder: (attr: DataModelAttributeFactory) => DataModelAttributeFactory) { + this.attributes.push(builder(new DataModelAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setBaseModel(model: Reference) { + this.baseModel = model; + this.update({ + baseModel: this.baseModel, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + addField(builder: (field: DataFieldFactory) => DataFieldFactory) { + this.fields.push(builder(new DataFieldFactory())); + this.update({ + fields: this.fields, + }); + return this; + } + + setIsView(isView: boolean) { + this.isView = isView; + 
this.update({ + isView: this.isView, + }); + return this; + } + + addMixin(mixin: Reference) { + this.mixins.push(mixin); + this.update({ + mixins: this.mixins, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } +} + +export class DataFieldFactory extends AstFactory { + attributes: DataFieldAttributeFactory[] = []; + comments: string[] = []; + name?: string; + type?: DataFieldTypeFactory; + + constructor() { + super({ type: DataField, node: { attributes: [], comments: [] } }); + } + + addAttribute( + builder: ((attr: DataFieldAttributeFactory) => DataFieldAttributeFactory) | DataFieldAttributeFactory, + ) { + if (builder instanceof DataFieldAttributeFactory) { + builder.setContainer(this.node); + this.attributes.push(builder); + } else { + this.attributes.push(builder(new DataFieldAttributeFactory())); + } + this.update({ + attributes: this.attributes, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setType(builder: (type: DataFieldTypeFactory) => DataFieldTypeFactory) { + this.type = builder(new DataFieldTypeFactory()); + this.update({ + type: this.type, + }); + return this; + } +} + +export class DataFieldTypeFactory extends AstFactory { + array?: boolean; + optional?: boolean; + reference?: Reference; + type?: BuiltinType; + unsupported?: UnsupportedFieldTypeFactory; + + constructor() { + super({ type: DataFieldType }); + } + + setArray(array: boolean) { + this.array = array; + this.update({ + array: this.array, + }); + return this; + } + + setOptional(optional: boolean) { + this.optional = optional; + this.update({ + optional: this.optional, + }); + return this; + } + + setReference(reference: TypeDeclaration) { + this.reference = { + $refText: reference.name, + ref: reference, + }; + this.update({ + reference: this.reference, + }); + return this; + } + + setType(type: BuiltinType) { + this.type = type; + this.update({ + type: this.type, + }); + return this; + } + + setUnsupported(builder: (a: UnsupportedFieldTypeFactory) => UnsupportedFieldTypeFactory) { + this.unsupported = builder(new UnsupportedFieldTypeFactory()); + this.update({ + unsupported: this.unsupported, + }); + return this; + } +} + +export class UnsupportedFieldTypeFactory extends AstFactory { + value?: AstFactory; + constructor() { + super({ type: UnsupportedFieldType }); + } + setValue(builder: (value: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class ModelFactory extends AstFactory { + declarations: AstFactory[] = []; + imports: ModelImportFactory[] = []; + constructor() { + super({ type: Model, node: { declarations: [], imports: [] } }); + } + addImport(builder: (b: ModelImportFactory) => ModelImportFactory) { + this.imports.push(builder(new ModelImportFactory())); + this.update({ + imports: this.imports, + }); + return this; + } + addDeclaration(builder: (b: DeclarationBuilder) => AstFactory) { + this.declarations.push(builder(DeclarationBuilder())); + this.update({ + declarations: this.declarations, + }); + return this; + } +} + +export class ModelImportFactory extends AstFactory { + path?: string | undefined; + + constructor() { + super({ type: ModelImport }); + } + + setPath(path: string) { + this.path 
= path; + this.update({ + path: this.path, + }); + return this; + } +} + +export class EnumFactory extends AstFactory { + name?: string; + comments: string[] = []; + fields: EnumFieldFactory[] = []; + attributes: DataModelAttributeFactory[] = []; + + constructor() { + super({ type: Enum, node: { comments: [], fields: [], attributes: [] } }); + } + + addField(builder: (b: EnumFieldFactory) => EnumFieldFactory) { + this.fields.push(builder(new EnumFieldFactory())); + this.update({ + fields: this.fields, + }); + return this; + } + + addAttribute(builder: (b: DataModelAttributeFactory) => DataModelAttributeFactory) { + this.attributes.push(builder(new DataModelAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } +} + +export class EnumFieldFactory extends AstFactory { + name?: RegularIDWithTypeNames; + comments: string[] = []; + attributes: DataFieldAttributeFactory[] = []; + + constructor() { + super({ type: EnumField, node: { comments: [], attributes: [] } }); + } + + setName(name: RegularIDWithTypeNames) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + addAttribute(builder: (b: DataFieldAttributeFactory) => DataFieldAttributeFactory) { + this.attributes.push(builder(new DataFieldAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + addComment(comment: string) { + this.comments.push(comment); + this.update({ + comments: this.comments, + }); + return this; + } +} diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts new file mode 100644 index 00000000..ea8e984d --- /dev/null +++ b/packages/language/src/factory/expression.ts @@ -0,0 +1,303 @@ +import type { Reference } from 'langium'; +import { AstFactory } from '.'; +import { + Argument, + ArrayExpr, + BinaryExpr, + FieldInitializer, + FunctionDecl, + InvocationExpr, + MemberAccessExpr, + MemberAccessTarget, + ObjectExpr, + ReferenceArg, + ReferenceExpr, + ReferenceTarget, + UnaryExpr, + type Expression, + type RegularID, +} from '../ast'; +import { + BooleanLiteralFactory, + NullExprFactory, + NumberLiteralFactory, + StringLiteralFactory, + ThisExprFactory, +} from './primitives'; + +export const ExpressionBuilder = () => + ({ + get ArrayExpr() { + return new ArrayExprFactory(); + }, + get BinaryExpr() { + return new BinaryExprFactory(); + }, + get BooleanLiteral() { + return new BooleanLiteralFactory(); + }, + get InvocationExpr() { + return new InvocationExprFactory(); + }, + get MemberAccessExpr() { + return new MemberAccessExprFactory(); + }, + get NullExpr() { + return new NullExprFactory(); + }, + get NumberLiteral() { + return new NumberLiteralFactory(); + }, + get ObjectExpr() { + return new ObjectExprFactory(); + }, + get ReferenceExpr() { + return new ReferenceExprFactory(); + }, + get StringLiteral() { + return new StringLiteralFactory(); + }, + get ThisExpr() { + return new ThisExprFactory(); + }, + get UnaryExpr() { + return new UnaryExprFactory(); + }, + }) satisfies ExpressionBuilderType; +type ExpressionBuilderType = { + [K in T['$type']]: AstFactory>; +}; + +type ExpressionFactoryMap = ReturnType; + +export type ExpressionBuilder = Pick< + ExpressionFactoryMap, + Extract +>; + +export class UnaryExprFactory extends AstFactory { + operand?: AstFactory; + + constructor() { + super({ type: UnaryExpr, node: { operator: '!' 
} }); + } + + setOperand(builder: (a: ExpressionBuilder) => AstFactory) { + this.operand = builder(ExpressionBuilder()); + this.update({ + operand: this.operand, + }); + return this; + } +} + +export class ReferenceExprFactory extends AstFactory { + target?: Reference; + args: ReferenceArgFactory[] = []; + + constructor() { + super({ type: ReferenceExpr, node: { args: [] } }); + } + + setTarget(target: ReferenceTarget) { + this.target = { + $refText: target.name, + ref: target, + }; + this.update({ + target: this.target, + }); + return this; + } + + addArg(builder: (a: ReferenceArgFactory) => ReferenceArgFactory) { + this.args.push(builder(new ReferenceArgFactory())); + this.update({ + args: this.args, + }); + return this; + } +} + +export class ReferenceArgFactory extends AstFactory { + name?: string; + value?: AstFactory; + + constructor() { + super({ type: ReferenceArg }); + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value, + }); + return this; + } +} + +export class MemberAccessExprFactory extends AstFactory { + member?: Reference; + operand?: AstFactory; + + constructor() { + super({ type: MemberAccessExpr }); + } + + setMember(target: Reference) { + this.member = target; + this.update({ + member: this.member, + }); + return this; + } + + setOperand(builder: (b: ExpressionBuilder) => AstFactory) { + this.operand = builder(ExpressionBuilder()); + this.update({ + operand: this.operand, + }); + return this; + } +} + +export class ObjectExprFactory extends AstFactory { + fields: FieldInitializerFactory[] = []; + + constructor() { + super({ type: ObjectExpr, node: { fields: [] } }); + } + + addField(builder: (b: FieldInitializerFactory) => FieldInitializerFactory) { + this.fields.push(builder(new FieldInitializerFactory())); + this.update({ + fields: this.fields, + }); + return this; + } +} + +export class FieldInitializerFactory extends AstFactory { + name?: RegularID; + value?: AstFactory; + + constructor() { + super({ type: FieldInitializer }); + } + + setName(name: RegularID) { + this.name = name; + this.update({ + name: this.name!, + }); + return this; + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class InvocationExprFactory extends AstFactory { + args: ArgumentFactory[] = []; + function?: Reference; + + constructor() { + super({ type: InvocationExpr, node: { args: [] } }); + } + + addArg(builder: (arg: ArgumentFactory) => ArgumentFactory) { + this.args.push(builder(new ArgumentFactory())); + this.update({ + args: this.args, + }); + return this; + } + + setFunction(value: FunctionDecl) { + this.function = { + $refText: value.name, + ref: value, + }; + this.update({ + function: this.function!, + }); + return this; + } +} + +export class ArgumentFactory extends AstFactory { + value?: AstFactory; + + constructor() { + super({ type: Argument }); + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class ArrayExprFactory extends AstFactory { + items: AstFactory[] = []; + + constructor() { + super({ type: ArrayExpr, node: { items: [] } }); + } + + addItem(builder: (a: ExpressionBuilder) => AstFactory) { + 
this.items.push(builder(ExpressionBuilder())); + this.update({ + items: this.items, + }); + return this; + } +} + +export class BinaryExprFactory extends AstFactory { + operator?: BinaryExpr['operator']; + right?: AstFactory; + left?: AstFactory; + + constructor() { + super({ type: BinaryExpr }); + } + + setOperator(operator: BinaryExpr['operator']) { + this.operator = operator; + this.update({ + operator: this.operator!, + }); + return this; + } + setRight(builder: (arg: ExpressionBuilder) => AstFactory) { + this.right = builder(ExpressionBuilder()); + this.update({ + right: this.right!, + }); + return this; + } + setLeft(builder: (arg: ExpressionBuilder) => AstFactory) { + this.left = builder(ExpressionBuilder()); + this.update({ + left: this.left!, + }); + return this; + } +} diff --git a/packages/language/src/factory/index.ts b/packages/language/src/factory/index.ts new file mode 100644 index 00000000..e05891ab --- /dev/null +++ b/packages/language/src/factory/index.ts @@ -0,0 +1,61 @@ +import { type AstNode } from '../ast'; + +export type ContainerProps = { + $container: T; + $containerProperty?: string; + $containerIndex?: number; +}; + +type NodeFactoriesFor = { + [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +} & { + [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +}; + +export abstract class AstFactory { + node = {} as T; + constructor({ type, node }: { type: T['$type']; node?: Partial }) { + (this.node as any).$type = type; + if (node) { + this.update(node); + } + } + setContainer(container: T['$container']) { + (this.node as any).$container = container; + return this; + } + + get(params?: ContainerProps): T { + if (params) this.update(params as any); + return this.node; + } + update(nodeArg: Partial>): T { + const keys = Object.keys(nodeArg as object); + keys.forEach((key) => { + const child = (nodeArg as any)[key]; + if (child instanceof AstFactory) { + (this.node as any)[key] = child.get({ $container: this.node as any }); + } else if (Array.isArray(child)) { + (this.node as any)[key] = child.map((item: any) => + item instanceof AstFactory ? 
item.get({ $container: this.node as any }) : item, + ); + } else { + (this.node as any)[key] = child; + } + }); + return this.node; + } + + resolveChilds(nodeArg: T | NodeFactoriesFor): T { + return this.update(nodeArg); + } +} + +export * from './primitives'; +export * from './expression'; +export * from './declaration'; +export * from './attribute'; diff --git a/packages/language/src/factory/primitives.ts b/packages/language/src/factory/primitives.ts new file mode 100644 index 00000000..1db7e051 --- /dev/null +++ b/packages/language/src/factory/primitives.ts @@ -0,0 +1,61 @@ +import { AstFactory } from '.'; +import { BooleanLiteral, NullExpr, NumberLiteral, StringLiteral, ThisExpr } from '../ast'; + +export class ThisExprFactory extends AstFactory { + constructor() { + super({ type: ThisExpr, node: { value: 'this' } }); + } +} + +export class NullExprFactory extends AstFactory { + constructor() { + super({ type: NullExpr, node: { value: 'null' } }); + } +} + +export class NumberLiteralFactory extends AstFactory { + value?: number | string; + + constructor() { + super({ type: NumberLiteral }); + } + + setValue(value: number | string) { + this.value = value; + this.update({ + value: this.value.toString(), + }); + return this; + } +} + +export class StringLiteralFactory extends AstFactory { + value?: string; + + constructor() { + super({ type: StringLiteral }); + } + + setValue(value: string) { + this.value = value; + this.update({ + value: this.value, + }); + return this; + } +} +export class BooleanLiteralFactory extends AstFactory { + value?: boolean; + + constructor() { + super({ type: BooleanLiteral }); + } + + setValue(value: boolean) { + this.value = value; + this.update({ + value: this.value, + }); + return this; + } +} diff --git a/packages/language/tsup.config.ts b/packages/language/tsup.config.ts index 0d5d2b6c..48282a08 100644 --- a/packages/language/tsup.config.ts +++ b/packages/language/tsup.config.ts @@ -5,6 +5,7 @@ export default defineConfig({ index: 'src/index.ts', ast: 'src/ast.ts', utils: 'src/utils.ts', + factory: 'src/factory/index.ts', }, outDir: 'dist', splitting: false, From ec56faa96d02fb931cdaeac78a21f0da04374195 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 01:33:28 +0200 Subject: [PATCH 08/29] fix: ast factory import order --- packages/language/src/factory/ast-factory.ts | 56 +++++++++++++++++++ packages/language/src/factory/index.ts | 58 +------------------- 2 files changed, 57 insertions(+), 57 deletions(-) create mode 100644 packages/language/src/factory/ast-factory.ts diff --git a/packages/language/src/factory/ast-factory.ts b/packages/language/src/factory/ast-factory.ts new file mode 100644 index 00000000..e01dd7ce --- /dev/null +++ b/packages/language/src/factory/ast-factory.ts @@ -0,0 +1,56 @@ +import { type AstNode } from '../ast'; + +export type ContainerProps = { + $container: T; + $containerProperty?: string; + $containerIndex?: number; +}; + +type NodeFactoriesFor = { + [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +} & { + [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] + ? 
(AstFactory | U)[] + : AstFactory | N[K]; +}; + +export abstract class AstFactory { + node = {} as T; + constructor({ type, node }: { type: T['$type']; node?: Partial }) { + (this.node as any).$type = type; + if (node) { + this.update(node); + } + } + setContainer(container: T['$container']) { + (this.node as any).$container = container; + return this; + } + + get(params?: ContainerProps): T { + if (params) this.update(params as any); + return this.node; + } + update(nodeArg: Partial>): T { + const keys = Object.keys(nodeArg as object); + keys.forEach((key) => { + const child = (nodeArg as any)[key]; + if (child instanceof AstFactory) { + (this.node as any)[key] = child.get({ $container: this.node as any }); + } else if (Array.isArray(child)) { + (this.node as any)[key] = child.map((item: any) => + item instanceof AstFactory ? item.get({ $container: this.node as any }) : item, + ); + } else { + (this.node as any)[key] = child; + } + }); + return this.node; + } + + resolveChilds(nodeArg: T | NodeFactoriesFor): T { + return this.update(nodeArg); + } +} diff --git a/packages/language/src/factory/index.ts b/packages/language/src/factory/index.ts index e05891ab..1ea2a286 100644 --- a/packages/language/src/factory/index.ts +++ b/packages/language/src/factory/index.ts @@ -1,60 +1,4 @@ -import { type AstNode } from '../ast'; - -export type ContainerProps = { - $container: T; - $containerProperty?: string; - $containerIndex?: number; -}; - -type NodeFactoriesFor = { - [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] - ? (AstFactory | U)[] - : AstFactory | N[K]; -} & { - [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] - ? (AstFactory | U)[] - : AstFactory | N[K]; -}; - -export abstract class AstFactory { - node = {} as T; - constructor({ type, node }: { type: T['$type']; node?: Partial }) { - (this.node as any).$type = type; - if (node) { - this.update(node); - } - } - setContainer(container: T['$container']) { - (this.node as any).$container = container; - return this; - } - - get(params?: ContainerProps): T { - if (params) this.update(params as any); - return this.node; - } - update(nodeArg: Partial>): T { - const keys = Object.keys(nodeArg as object); - keys.forEach((key) => { - const child = (nodeArg as any)[key]; - if (child instanceof AstFactory) { - (this.node as any)[key] = child.get({ $container: this.node as any }); - } else if (Array.isArray(child)) { - (this.node as any)[key] = child.map((item: any) => - item instanceof AstFactory ? 
item.get({ $container: this.node as any }) : item, - ); - } else { - (this.node as any)[key] = child; - } - }); - return this.node; - } - - resolveChilds(nodeArg: T | NodeFactoriesFor): T { - return this.update(nodeArg); - } -} - +export * from './ast-factory'; export * from './primitives'; export * from './expression'; export * from './declaration'; From f19c6f5d0190cc42d895becfffd05e1cfe919cbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 03:05:23 +0200 Subject: [PATCH 09/29] fix: some runtime bugs --- packages/cli/src/actions/db.ts | 87 ++++++++++--------- packages/cli/src/actions/pull/index.ts | 92 ++++++++++++++------- packages/language/src/factory/attribute.ts | 6 +- packages/language/src/factory/expression.ts | 8 +- 4 files changed, 118 insertions(+), 75 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 8dea5cd9..c9a3ef4c 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -68,59 +68,64 @@ async function runPush(options: PushOptions) { } async function runPull(options: PullOptions) { - const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - config(); - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; - const datasource = getDatasource(model); - - if (!datasource) { - throw new Error('No datasource found in the schema.'); - } + try { + const schemaFile = getSchemaFile(options.schema); + const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + config(); + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const datasource = getDatasource(model); + + if (!datasource) { + throw new Error('No datasource found in the schema.'); + } - if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { - throw new Error(`Unsupported datasource provider: ${datasource.provider}`); - } + if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { + throw new Error(`Unsupported datasource provider: ${datasource.provider}`); + } - const provider = providers[datasource.provider]; + const provider = providers[datasource.provider]; - if (!provider) { - throw new Error(`No introspection provider found for: ${datasource.provider}`); - } + if (!provider) { + throw new Error(`No introspection provider found for: ${datasource.provider}`); + } - const { enums, tables } = await provider.introspect(datasource.url); + const { enums, tables } = await provider.introspect(datasource.url); - const newModel: Model = { - $type: 'Model', - $container: undefined, - $containerProperty: undefined, - $containerIndex: undefined, - declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], - imports: [], - }; + const newModel: Model = { + $type: 'Model', + $container: undefined, + $containerProperty: undefined, + $containerIndex: undefined, + declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], + imports: [], + }; - syncEnums({ dbEnums: enums, model: newModel, services, options }); + syncEnums({ dbEnums: enums, model: newModel, services, options }); - const resolvedRelations: Relation[] = []; - for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services, options }); - resolvedRelations.push(...relations); - } + const resolvedRelations: Relation[] = []; + for (const table of tables) { + const relations = syncTable({ table, model: newModel, provider, services, options }); + 
resolvedRelations.push(...relations); + } - for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services, options }); - } + for (const relation of resolvedRelations) { + syncRelation({ model: newModel, relation, services, options }); + } - //TODO: diff models and apply changes only + //TODO: diff models and apply changes only - const generator = new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator(); - const zmodelSchema = generator.generate(newModel); + const zmodelSchema = generator.generate(newModel); - console.log(options.out ? `Writing to ${options.out}` : schemaFile); + console.log(options.out ? `Writing to ${options.out}` : schemaFile); - const outPath = options.out ? path.resolve(options.out) : schemaFile; - console.log(outPath); + const outPath = options.out ? path.resolve(options.out) : schemaFile; + console.log(outPath); - fs.writeFileSync(outPath, zmodelSchema); + fs.writeFileSync(outPath, zmodelSchema); + } catch (error) { + console.log(error); + throw error; + } } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 708244a3..de06ca60 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -151,26 +151,13 @@ export function syncTable({ const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); modelFactory.setContainer(model); + if (modified) { modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); } - if (multiPk) { - const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); - modelFactory.addAttribute((builder) => - builder.setDecl(modelIdAttribute).addArg((argBuilder) => { - const arrayExpr = argBuilder.ArrayExpr; - pkColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; - arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); - }); - return arrayExpr; - }), - ); - } - table.columns.forEach((column) => { if (column.foreign_key_table) { relations.push({ @@ -231,7 +218,7 @@ export function syncTable({ enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], }) : []; - defaultValuesAttrs.forEach(builder.addAttribute); + defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } if (column.pk && !multiPk) { @@ -254,12 +241,12 @@ export function syncTable({ }); }); - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqieColumns.length > 0) { + const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); + if (multiPk) { modelFactory.addAttribute((builder) => - builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { + builder.setDecl(modelIdAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.map((c) => { + pkColumns.map((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); @@ -268,21 +255,65 @@ export function syncTable({ ); } - model.declarations.push(modelFactory.node); - - table.indexes.forEach((index) => { + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); + if (uniqieColumns.length > 0) { modelFactory.addAttribute((builder) => - builder.setDecl(modelindexAttribute).addArg((argBuilder) => { + builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const 
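// ----------------------------------------------------------------------------
// Editor's note: the runPull() flow introduced in this commit, condensed into an
// outline with error handling and option plumbing stripped. Names and import
// paths mirror db.ts in the patch; the inline Model construction is simplified
// (the patch builds the node field by field), so treat this as a sketch rather
// than the shipped code.
import type { ZModelServices } from '@zenstackhq/language';
import type { Model } from '@zenstackhq/language/ast';
import { ZModelCodeGenerator } from '@zenstackhq/sdk';
import { syncEnums, syncRelation, syncTable, type Relation } from './pull';
import { providers } from './pull/provider';
import { getDatasource } from './pull/utils';
import type { PullOptions } from './db';

async function pullOutline(model: Model, services: ZModelServices, options: PullOptions) {
    const datasource = getDatasource(model);
    const provider = providers[datasource.provider]; // 'sqlite' | 'postgresql'
    if (!provider) throw new Error(`No introspection provider found for: ${datasource.provider}`);

    const { enums, tables } = await provider.introspect(datasource.url);

    // start from a fresh model that keeps only the datasource declaration
    const newModel: Model = { ...model, declarations: model.declarations.filter((d) => d.$type === 'DataSource') };

    // enums first, then tables; relations are collected and resolved in a second
    // pass so that both ends of every foreign key already exist as models
    syncEnums({ dbEnums: enums, model: newModel, services, options });
    const relations: Relation[] = [];
    for (const table of tables) {
        relations.push(...syncTable({ table, model: newModel, provider, services, options }));
    }
    for (const relation of relations) {
        syncRelation({ model: newModel, relation, services, options });
    }

    return new ZModelCodeGenerator().generate(newModel);
}
// ----------------------------------------------------------------------------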
arrayExpr = argBuilder.ArrayExpr; - index.columns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + uniqieColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; }), ); + } + + table.indexes.forEach((index) => { + if (index.predicate) { + //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. Read more: https://pris.ly/d/check-constraints + console.log( + 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', + `- Model: "${table.name}", constraint: "${index.name}"`, + ); + return; + } + if (index.columns.find((c) => c.expression)) { + console.log( + 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', + `- Model: "${table.name}", constraint: "${index.name}"`, + ); + return; + } + + if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name))) { + //skip primary key + return; + } + + modelFactory.addAttribute((builder) => + builder + .setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute) + .addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + index.columns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + if (!ref) console.log(c, table.name); + arrayExpr.addItem((itemBuilder) => { + const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); + if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); + + return refExpr; + }); + }); + return arrayExpr; + }) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), + ); }); + model.declarations.push(modelFactory.node); + return relations; } @@ -327,12 +358,15 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
'_' : ''; const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + + if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { + sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; + } const sourceFieldFactory = new DataFieldFactory() .setContainer(sourceModel) - .setName( - `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, - ) + .setName(sourceFieldName) .setType((tb) => tb .setOptional(relation.nullable) @@ -345,7 +379,7 @@ export function syncRelation({ .addArg((ab) => ab.StringLiteral.setValue(relationName)) .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.StringLiteral.setValue(relation.fk_name)), 'map'), + .addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'), ); sourceModel.fields.push(sourceFieldFactory.node); diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts index a42c5e50..52aeebc7 100644 --- a/packages/language/src/factory/attribute.ts +++ b/packages/language/src/factory/attribute.ts @@ -18,7 +18,7 @@ export class DataFieldAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; decl?: Reference; constructor() { - super({ type: DataFieldAttribute }); + super({ type: DataFieldAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { this.decl = { @@ -47,7 +47,7 @@ export class DataModelAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; decl?: Reference; constructor() { - super({ type: DataModelAttribute }); + super({ type: DataModelAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { this.decl = { @@ -102,7 +102,7 @@ export class InternalAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; constructor() { - super({ type: InternalAttribute }); + super({ type: InternalAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts index ea8e984d..a0ba8400 100644 --- a/packages/language/src/factory/expression.ts +++ b/packages/language/src/factory/expression.ts @@ -110,8 +110,12 @@ export class ReferenceExprFactory extends AstFactory { return this; } - addArg(builder: (a: ReferenceArgFactory) => ReferenceArgFactory) { - this.args.push(builder(new ReferenceArgFactory())); + addArg(builder: (a: ExpressionBuilder) => AstFactory, name?: string) { + const arg = new ReferenceArgFactory().setValue(builder); + if (name) { + arg.setName(name); + } + this.args.push(arg); this.update({ args: this.args, }); From 3cd05d1a4c9d8ebcc47867d373fa0f2f5ef36231 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 20 Oct 2025 21:58:40 +0200 Subject: [PATCH 10/29] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 38 ++++++++------- .../src/actions/pull/provider/postgresql.ts | 48 ++----------------- packages/cli/src/actions/pull/utils.ts | 28 ++++++----- 3 files changed, 41 insertions(+), 73 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts 
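// ----------------------------------------------------------------------------
// Editor's note: the `node: { args: [] }` additions to the attribute factories
// above presumably cover attributes that never receive an addArg() call: before
// this change the generated node only gained an `args` array on the first
// addArg(), so consumers iterating `attr.args` could hit undefined. A small
// illustration (the `decl` value is hypothetical; any resolved Attribute, e.g.
// the result of getAttributeRef('@@map', services), would do):
import type { Attribute } from '../ast';
import { DataModelAttributeFactory } from './attribute';

declare const decl: Attribute;
const attr = new DataModelAttributeFactory().setDecl(decl).get();
console.log(attr.args.length); // 0 — previously `attr.args` stayed undefined until the first addArg()
// ----------------------------------------------------------------------------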
b/packages/cli/src/actions/pull/index.ts index de06ca60..14c9b477 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -8,7 +8,7 @@ import { getAttributeRef, getDbName } from './utils'; export function syncEnums({ dbEnums, model, - options: options, + options, services, }: { dbEnums: IntrospectedEnum[]; @@ -23,18 +23,18 @@ export function syncEnums({ if (modified) factory.addAttribute((builder) => builder - .setDecl(getAttributeRef('@@map', services)!) + .setDecl(getAttributeRef('@@map', services)) .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), ); - dbEnum.values.map((v) => { + dbEnum.values.forEach((v) => { const { name, modified } = resolveNameCasing(options, v); factory.addField((builder) => { builder.setName(name); if (modified) builder.addAttribute((builder) => builder - .setDecl(getAttributeRef('@map', services)!) + .setDecl(getAttributeRef('@map', services)) .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), ); @@ -46,7 +46,7 @@ export function syncEnums({ } function resolveNameCasing(options: PullOptions, originalName: string) { - let name: string; + let name = originalName; switch (options.naming) { case 'pascal': @@ -61,10 +61,6 @@ function resolveNameCasing(options: PullOptions, originalName: string) { case 'kebab': name = toKebabCase(originalName); break; - case 'none': - default: - name = originalName; - break; } return { @@ -188,7 +184,7 @@ export function syncTable({ typeBuilder.setArray(builtinType.isArray); typeBuilder.setOptional(column.nullable); - if (builtinType.type != 'Unsupported') { + if (builtinType.type !== 'Unsupported') { typeBuilder.setType(builtinType.type); } else { typeBuilder.setUnsupported((unsupportedBuilder) => @@ -246,8 +242,11 @@ export function syncTable({ modelFactory.addAttribute((builder) => builder.setDecl(modelIdAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - pkColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + pkColumns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); + if (!ref) { + throw new Error(`Field ${c} not found`); + } arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; @@ -260,8 +259,11 @@ export function syncTable({ modelFactory.addAttribute((builder) => builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + uniqieColumns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); + if (!ref) { + throw new Error(`Field ${c} not found`); + } arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; @@ -296,9 +298,11 @@ export function syncTable({ .setDecl(index.unique ? 
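// ----------------------------------------------------------------------------
// Editor's note: expected behaviour of resolveNameCasing() (module-private in
// pull/index.ts above) for the supported --naming values, assuming the usual
// casing helpers (toPascalCase, toCamelCase, ...); the exact `modified`
// bookkeeping is assumed, not quoted from the patch:
declare function resolveNameCasing(
    options: { naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none' },
    originalName: string,
): { name: string; modified: boolean };

console.assert(resolveNameCasing({ naming: 'pascal' }, 'user_accounts').name === 'UserAccounts');
console.assert(resolveNameCasing({ naming: 'camel' }, 'user_accounts').name === 'userAccounts');
console.assert(resolveNameCasing({ naming: 'kebab' }, 'UserAccounts').name === 'user-accounts');
console.assert(resolveNameCasing({}, 'user_accounts').name === 'user_accounts'); // no --naming: keep the db name
// When `modified` is true the caller adds a @map/@@map attribute, so a rename
// never changes the physical database identifier.
// ----------------------------------------------------------------------------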
modelUniqueAttribute : modelindexAttribute) .addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - index.columns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; - if (!ref) console.log(c, table.name); + index.columns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name); + if (!ref) { + throw new Error(`Column ${c.name} not found in model ${table.name}`); + } arrayExpr.addItem((itemBuilder) => { const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 07dcee91..73428d37 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -61,48 +61,6 @@ export const postgresql: IntrospectionProvider = { case 'json': case 'jsonb': return { type: 'Json', isArray }; - - // unsupported or postgres-specific - case 'time': - case 'timetz': - case 'interval': - case 'money': - case 'xml': - case 'bit': - case 'varbit': - case 'cidr': - case 'inet': - case 'macaddr': - case 'macaddr8': - case 'point': - case 'line': - case 'lseg': - case 'box': - case 'path': - case 'polygon': - case 'circle': - case 'tsvector': - case 'tsquery': - case 'jsonpath': - case 'hstore': - case 'oid': - case 'name': - case 'regclass': - case 'regproc': - case 'regprocedure': - case 'regoper': - case 'regoperator': - case 'regtype': - case 'regconfig': - case 'regdictionary': - case 'pg_lsn': - case 'txid_snapshot': - case 'int4range': - case 'int8range': - case 'numrange': - case 'tsrange': - case 'tstzrange': - case 'daterange': default: return { type: 'Unsupported' as const, isArray }; } @@ -123,13 +81,13 @@ export const postgresql: IntrospectionProvider = { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)!); + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)!))); + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services)!)); + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } return factories; } diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 23462974..33a6ace3 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,11 +1,11 @@ import type { ZModelServices } from '@zenstackhq/language'; import { - AbstractDeclaration, - DataField, - DataModel, - Enum, - EnumField, - FunctionDecl, + type AbstractDeclaration, + type DataField, + type DataModel, + type Enum, + type EnumField, + type FunctionDecl, isInvocationExpr, type Attribute, type Model, @@ -15,8 +15,10 @@ import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; import type { Reference } from 'langium'; export function getAttribute(model: Model, attrName: string) { - 
const references = model.$document!.references as Reference[]; - return references.find((a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName)?.ref as + if (!model.$document) throw new Error('Model is not associated with a document.'); + + const references = model.$document.references as Reference[]; + return references.find((a) => a.ref?.$type === 'Attribute' && a.ref?.name === attrName)?.ref as | Attribute | undefined; } @@ -27,7 +29,9 @@ export function getDatasource(model: Model) { throw new Error('No datasource declaration found in the schema.'); } - const urlField = datasource.fields.find((f) => f.name === 'url')!; + const urlField = datasource.fields.find((f) => f.name === 'url'); + + if (!urlField) throw new Error(`No url field found in the datasource declaration.`); let url = getStringLiteral(urlField.value); @@ -73,9 +77,11 @@ export function getDeclarationRef( name: string, services: ZModelServices, ) { - return services.shared.workspace.IndexManager.allElements(type).find( + const node = services.shared.workspace.IndexManager.allElements(type).find( (m) => m.node && getDbName(m.node as T) === name, - )?.node as T | undefined; + )?.node; + if (!node) throw new Error(`Declaration not found: ${name}`); + return node as T; } export function getEnumRef(name: string, services: ZModelServices) { From b794a7c1e3a4da1b90c72a7ca87bc79a8a8036bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 20 Oct 2025 21:59:50 +0200 Subject: [PATCH 11/29] fix: update zmodel code generator - include imports in output - fix indentaions - include comments in output --- .../language/src/zmodel-code-generator.ts | 46 ++++++++++++++----- 1 file changed, 34 insertions(+), 12 deletions(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 21993878..a5bb0a50 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -71,7 +71,7 @@ function gen(name: string) { */ export class ZModelCodeGenerator { private readonly options: ZModelCodeOptions; - + private readonly quote: string; constructor(options?: Partial) { this.options = { binaryExprNumberOfSpaces: options?.binaryExprNumberOfSpaces ?? 1, @@ -79,6 +79,7 @@ export class ZModelCodeGenerator { indent: options?.indent ?? 4, quote: options?.quote ?? 'single', }; + this.quote = this.options.quote === 'double' ? '"' : "'"; } /** @@ -92,9 +93,14 @@ export class ZModelCodeGenerator { return handler.value.call(this, ast); } + private quotedStr(val: string): string { + const trimmedVal = val.replace(new RegExp(`${this.quote}`, 'g'), `\\${this.quote}`); + return `${this.quote}${trimmedVal}${this.quote}`; + } + @gen(Model) private _generateModel(ast: Model) { - return ast.declarations.map((d) => this.generate(d)).join('\n\n'); + return `${ast.imports.map((d) => this.generate(d)).join('\n')}\n\n${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; } @gen(DataSource) @@ -106,16 +112,17 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ModelImport) private _generateModelImport(ast: ModelImport) { - return `import '${ast.path}'`; + return `import ${this.quotedStr(ast.path)}`; } @gen(Enum) private _generateEnum(ast: Enum) { return `enum ${ast.name} { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ast.attributes.length > 0 +${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ + ast.attributes.length > 0 ? 
'\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') : '' - } + } }`; } @@ -135,7 +142,9 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ConfigField) private _generateConfigField(ast: ConfigField) { - return `${ast.name} = ${this.generate(ast.value)}`; + const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); + const padding = ' '.repeat(longestName - ast.name.length + 1); + return `${ast.name}${padding}= ${this.generate(ast.value)}`; } @gen(ConfigArrayExpr) @@ -163,15 +172,24 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(PluginField) private _generatePluginField(ast: PluginField) { - return `${ast.name} = ${this.generate(ast.value)}`; + const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); + const padding = ' '.repeat(longestName - ast.name.length + 1); + return `${ast.name}${padding}= ${this.generate(ast.value)}`; } @gen(DataModel) private _generateDataModel(ast: DataModel) { - return `${ast.isView ? 'view' : 'model'} ${ast.name}${ + const comments = `${ast.comments.join('\n')}\n`; + + return `${ast.comments.length > 0 ? comments : ''}${ast.isView ? 'view' : 'model'} ${ast.name}${ ast.mixins.length > 0 ? ' mixes ' + ast.mixins.map((x) => x.$refText).join(', ') : '' } { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ +${ast.fields + .map((x) => { + const comments = x.comments.map((c) => `${this.indent}${c}`).join('\n'); + return (x.comments.length ? `${comments}\n` : '') + this.indent + this.generate(x); + }) + .join('\n')}${ ast.attributes.length > 0 ? '\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') : '' @@ -181,7 +199,11 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(DataField) private _generateDataField(ast: DataField) { - return `${ast.name} ${this.fieldType(ast.type)}${ + const longestFieldName = Math.max(...ast.$container.fields.map((f) => f.name.length)); + const longestType = Math.max(...ast.$container.fields.map((f) => this.fieldType(f.type).length)); + const paddingLeft = longestFieldName - ast.name.length; + const paddingRight = ast.attributes.length > 0 ? longestType - this.fieldType(ast.type).length : 0; + return `${ast.name}${' '.repeat(paddingLeft)} ${this.fieldType(ast.type)}${' '.repeat(paddingRight)}${ ast.attributes.length > 0 ? ' ' + ast.attributes.map((x) => this.generate(x)).join(' ') : '' }`; } @@ -235,7 +257,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(StringLiteral) private _generateLiteralExpr(ast: LiteralExpr) { - return this.options.quote === 'single' ? 
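// ----------------------------------------------------------------------------
// Editor's note: rough shape of the output produced by the formatting changes
// above (quotedStr plus the '=' and field-column alignment), assuming the
// default 4-space indent and `quote: 'double'`; the schema content is
// hypothetical:
//
//   datasource db {
//       provider = "postgresql"
//       url      = env("DATABASE_URL")
//   }
//
//   model User {
//       id        Int      @id @default(autoincrement())
//       email     String   @unique
//       createdAt DateTime @default(now()) @map("created_at")
//   }
//
import type { Model } from '@zenstackhq/language/ast';
import { ZModelCodeGenerator } from '@zenstackhq/sdk';

declare const model: Model;
const zmodel = new ZModelCodeGenerator({ quote: 'double' }).generate(model);
// ----------------------------------------------------------------------------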
`'${ast.value}'` : `"${ast.value}"`; + return this.quotedStr(ast.value as string); } @gen(NumberLiteral) @@ -278,7 +300,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(ReferenceArg) private _generateReferenceArg(ast: ReferenceArg) { - return `${ast.name}:${this.generate(ast.value)}`; + return `${ast.name}: ${this.generate(ast.value)}`; } @gen(MemberAccessExpr) From 33ecb33287fd4ed5896c69b982750362ab7ee193 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:31:16 +0200 Subject: [PATCH 12/29] feat: add exclude schemas option --- packages/cli/src/actions/db.ts | 7 +++++-- packages/cli/src/actions/pull/index.ts | 15 +++++++++++++++ packages/cli/src/index.ts | 3 ++- 3 files changed, 22 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index c9a3ef4c..6fe4351c 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -20,12 +20,13 @@ export type PullOptions = { out?: string; naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; alwaysMap?: boolean; + excludeSchemas: string[]; }; /** * CLI action for db related commands */ -export async function run(command: string, options: PushOptions) { +export async function run(command: string, options: any) { switch (command) { case 'push': await runPush(options); @@ -89,7 +90,9 @@ async function runPull(options: PullOptions) { throw new Error(`No introspection provider found for: ${datasource.provider}`); } - const { enums, tables } = await provider.introspect(datasource.url); + const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); + const enums = allEnums.filter((e) => !options.excludeSchemas.includes(e.schema_name)); + const tables = allTables.filter((t) => !options.excludeSchemas.includes(t.schema)); const newModel: Model = { $type: 'Model', diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 14c9b477..4b070a47 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -41,6 +41,15 @@ export function syncEnums({ return builder; }); }); + + if (dbEnum.schema_name && dbEnum.schema_name != '' && dbEnum.schema_name !== 'public') { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } + model.declarations.push(factory.get({ $container: model })); } } @@ -316,6 +325,12 @@ export function syncTable({ ); }); + if (table.schema && table.schema != '' && table.schema !== 'public') { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); + } + model.declarations.push(modelFactory.node); return relations; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 7e2e5c19..0774a4e3 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -142,7 +142,8 @@ function createProgram() { .description('Introspect your database.') .addOption(schemaOption) .addOption(noVersionCheckOption) - .addOption(new Option('--out ', 'add custom output path for the introspected schema')) + .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) + .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .action((options) => dbAction('pull', options)); dbCommand From 58922da9143ef6f5e14bd1dfe4c1014d9c871a19 Mon Sep 
17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:40:40 +0200 Subject: [PATCH 13/29] feat: implement initial diff update --- packages/cli/src/actions/db.ts | 93 +++++++++++++++++++++++--- packages/cli/src/actions/pull/index.ts | 30 ++++++--- 2 files changed, 104 insertions(+), 19 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6fe4351c..682cfc53 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,4 +1,4 @@ -import type { Model } from '@zenstackhq/language/ast'; +import { Model, Enum, DataModel } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -6,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource } from './pull/utils'; +import { getDatasource, getDbName } from './pull/utils'; import { config } from '@dotenvx/dotenvx'; type PushOptions = { @@ -115,18 +115,93 @@ async function runPull(options: PullOptions) { syncRelation({ model: newModel, relation, services, options }); } - //TODO: diff models and apply changes only + const cwd = new URL(`file://${process.cwd()}`).pathname; + const docs = services.shared.workspace.LangiumDocuments.all + .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) + .toArray(); + const docsSet = new Set(docs.map((d) => d.uri.toString())); + console.log(docsSet); + newModel.declarations + .filter((d) => [DataModel, Enum].includes(d.$type)) + .forEach((_declaration) => { + const declaration = _declaration as DataModel | Enum; + const declarations = services.shared.workspace.IndexManager.allElements(declaration.$type, docsSet); + const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) + ?.node as DataModel | Enum | undefined; + if (!originalModel) { + model.declarations.push(declaration); + (declaration as any).$container = model; + return; + } + + declaration.fields.forEach((f) => { + const originalField = originalModel.fields.find((d) => getDbName(d) === getDbName(f)); + + if (!originalField) { + console.log(`Added field ${f.name} to ${originalModel.name}`); + (f as any).$container = originalModel; + originalModel.fields.push(f as any); + return; + } + //TODO: update field + }); + originalModel.fields + .filter((f) => !declaration.fields.find((d) => getDbName(d) === getDbName(f))) + .forEach((f) => { + const model = f.$container; + const index = model.fields.findIndex((d) => d === f); + model.fields.splice(index, 1); + console.log(`Delete field ${f.name}`); + }); + }); + + services.shared.workspace.IndexManager.allElements('DataModel', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete model ${decl.name}`); + }); + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + 
.forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + + if (options.out && !fs.lstatSync(options.out).isFile()) { + throw new Error(`Output path ${options.out} is not a file`); + } - const generator = new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator({ + //TODO: make configurable + quote: 'double', + }); - const zmodelSchema = generator.generate(newModel); + if (options.out) { + const zmodelSchema = generator.generate(newModel); - console.log(options.out ? `Writing to ${options.out}` : schemaFile); + console.log(`Writing to ${options.out}`); - const outPath = options.out ? path.resolve(options.out) : schemaFile; - console.log(outPath); + const outPath = options.out ? path.resolve(options.out) : schemaFile; - fs.writeFileSync(outPath, zmodelSchema); + fs.writeFileSync(outPath, zmodelSchema); + } else { + docs.forEach(({ uri, parseResult: { value: model } }) => { + const zmodelSchema = generator.generate(model); + console.log(`Writing to ${uri.path}`); + fs.writeFileSync(uri.fsPath, zmodelSchema); + }); + } } catch (error) { console.log(error); throw error; diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4b070a47..2a6a18d8 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -42,12 +42,17 @@ export function syncEnums({ }); }); - if (dbEnum.schema_name && dbEnum.schema_name != '' && dbEnum.schema_name !== 'public') { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); + try { + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== 'public') { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } + } catch (_error: unknown) { + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(factory.get({ $container: model })); @@ -325,10 +330,15 @@ export function syncTable({ ); }); - if (table.schema && table.schema != '' && table.schema !== 'public') { - modelFactory.addAttribute((b) => - b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), - ); + try { + if (table.schema && table.schema !== '' && table.schema !== 'public') { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); + } + } catch (_error: unknown) { + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(modelFactory.node); From 4980be879a58f1af318973f81dc1fa7b1b799f04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:40:56 +0200 Subject: [PATCH 14/29] fix: update format in zmodel code generator --- packages/language/src/zmodel-code-generator.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index a5bb0a50..91e1dc7c 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -100,7 +100,7 @@ export class ZModelCodeGenerator { 
@gen(Model) private _generateModel(ast: Model) { - return `${ast.imports.map((d) => this.generate(d)).join('\n')}\n\n${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; + return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? '\n\n' : ''}${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; } @gen(DataSource) From 05eb61e945beee3965dc311169e9125617903e01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:41:55 +0200 Subject: [PATCH 15/29] fix: typo --- packages/cli/src/actions/pull/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 2a6a18d8..56fc8545 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -268,12 +268,12 @@ export function syncTable({ ); } - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqieColumns.length > 0) { + const uniqueColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); + if (uniqueColumns.length > 0) { modelFactory.addAttribute((builder) => builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.forEach((c) => { + uniqueColumns.forEach((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); if (!ref) { throw new Error(`Field ${c} not found`); From 5e8a159e3c852474b72cdca7a4010d1eb4b1e254 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 00:59:38 +0200 Subject: [PATCH 16/29] feat: progress on database introspection and syncing --- packages/cli/src/actions/db.ts | 171 ++++++++++++++---- packages/cli/src/actions/pull/index.ts | 143 ++++++++++----- .../src/actions/pull/provider/postgresql.ts | 84 +++++++-- .../cli/src/actions/pull/provider/provider.ts | 2 + .../cli/src/actions/pull/provider/sqlite.ts | 2 + packages/cli/src/actions/pull/utils.ts | 33 +++- packages/language/res/stdlib.zmodel | 6 +- .../language/src/zmodel-code-generator.ts | 5 +- 8 files changed, 345 insertions(+), 101 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 682cfc53..8da32319 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,4 +1,4 @@ -import { Model, Enum, DataModel } from '@zenstackhq/language/ast'; +import { Model, Enum, DataModel, DataField } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -6,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName } from './pull/utils'; +import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; import { config } from '@dotenvx/dotenvx'; type PushOptions = { @@ -20,7 +20,7 @@ export type PullOptions = { out?: string; naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; alwaysMap?: boolean; - excludeSchemas: string[]; + excludeSchemas?: string[]; }; /** @@ -91,8 +91,8 @@ async function runPull(options: PullOptions) { } const { enums: allEnums, tables: allTables } = await 
provider.introspect(datasource.url); - const enums = allEnums.filter((e) => !options.excludeSchemas.includes(e.schema_name)); - const tables = allTables.filter((t) => !options.excludeSchemas.includes(t.schema)); + const enums = allEnums.filter((e) => !options.excludeSchemas?.includes(e.schema_name)); + const tables = allTables.filter((t) => !options.excludeSchemas?.includes(t.schema)); const newModel: Model = { $type: 'Model', @@ -112,7 +112,28 @@ async function runPull(options: PullOptions) { } for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services, options }); + const simmilarRelations = resolvedRelations.filter((rr) => { + return ( + (rr.schema === relation.schema && + rr.table === relation.table && + rr.references.schema === relation.references.schema && + rr.references.table === relation.references.table) || + (rr.schema === relation.references.schema && + rr.column === relation.references.column && + rr.references.schema === relation.schema && + rr.references.table === relation.table) + ); + }).length; + const selfRelation = + relation.references.schema === relation.schema && relation.references.table === relation.table; + syncRelation({ + model: newModel, + relation, + services, + options, + selfRelation, + simmilarRelations, + }); } const cwd = new URL(`file://${process.cwd()}`).pathname; @@ -120,64 +141,141 @@ async function runPull(options: PullOptions) { .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); - console.log(docsSet); + + services.shared.workspace.IndexManager.allElements('DataModel', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete model ${decl.name}`); + }); + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + newModel.declarations .filter((d) => [DataModel, Enum].includes(d.$type)) .forEach((_declaration) => { const declaration = _declaration as DataModel | Enum; - const declarations = services.shared.workspace.IndexManager.allElements(declaration.$type, docsSet); + const declarations = services.shared.workspace.IndexManager.allElements( + declaration.$type, + docsSet, + ).toArray(); const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) ?.node as DataModel | Enum | undefined; if (!originalModel) { model.declarations.push(declaration); (declaration as any).$container = model; + declaration.fields.forEach((f) => { + if (f.$type === 'DataField' && f.type.reference?.ref) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), + )?.node; + if (ref) (f.type.reference.ref as any) = ref; + } + }); return; } declaration.fields.forEach((f) => { - const originalField = originalModel.fields.find((d) => getDbName(d) === getDbName(f)); + const originalField = originalModel.fields.find( + (d) => 
+ getDbName(d) === getDbName(f) || + (getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any)), + ); if (!originalField) { - console.log(`Added field ${f.name} to ${originalModel.name}`); + //console.log(`Added field ${f.name} to ${originalModel.name}`); (f as any).$container = originalModel; originalModel.fields.push(f as any); + if (f.$type === 'DataField' && f.type.reference?.ref) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), + )?.node as DataModel | undefined; + if (ref) { + (f.type.reference.$refText as any) = ref.name; + (f.type.reference.ref as any) = ref; + } + } return; } - //TODO: update field + + if (originalField.$type === 'DataField') { + const field = f as DataField; + originalField.type = field.type; + if (field.type.reference) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(field.type.reference!.ref as any), + )?.node as DataModel | undefined; + if (ref) { + (field.type.reference.$refText as any) = ref.name; + (field.type.reference.ref as any) = ref; + } + } + + (originalField.type.$container as any) = originalField; + } + + f.attributes.forEach((attr) => { + const originalAttribute = originalField.attributes.find( + (d) => d.decl.$refText === attr.decl.$refText, + ); + + if (!originalAttribute) { + //console.log(`Added Attribute ${attr.decl.$refText} to ${f.name}`); + (f as any).$container = originalField; + originalField.attributes.push(attr as any); + return; + } + + originalAttribute.args = attr.args; + attr.args.forEach((a) => { + (a.$container as any) = originalAttribute; + }); + }); + + originalField.attributes + .filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText)) + .forEach((attr) => { + const field = attr.$container; + const index = field.attributes.findIndex((d) => d === attr); + field.attributes.splice(index, 1); + //console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + }); }); originalModel.fields - .filter((f) => !declaration.fields.find((d) => getDbName(d) === getDbName(f))) + .filter( + (f) => + !declaration.fields.find( + (d) => + getDbName(d) === getDbName(f) || + (getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any)), + ), + ) .forEach((f) => { const model = f.$container; const index = model.fields.findIndex((d) => d === f); model.fields.splice(index, 1); - console.log(`Delete field ${f.name}`); + //console.log(`Delete field ${f.name}`); }); }); - services.shared.workspace.IndexManager.allElements('DataModel', docsSet) - .filter( - (declaration) => - !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete model ${decl.name}`); - }); - services.shared.workspace.IndexManager.allElements('Enum', docsSet) - .filter( - (declaration) => - !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); - }); - if (options.out && !fs.lstatSync(options.out).isFile()) { throw 
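// ----------------------------------------------------------------------------
// Editor's note: the merge-back pass above reduced to its skeleton. matchesField
// is a hypothetical name for the inline "same column, or same FK constraint"
// test used in the patch; attribute reconciliation and reference re-pointing
// are omitted here.
import type { DataField, DataModel } from '@zenstackhq/language/ast';
import { getDbName, getRelationFkName } from './pull/utils';

function matchesField(a: DataField, b: DataField) {
    const sameColumn = getDbName(a) === getDbName(b);
    const sameFk = !!getRelationFkName(a) && getRelationFkName(a) === getRelationFkName(b);
    return sameColumn || sameFk;
}

function mergeIntoExistingModel(introspected: DataModel, original: DataModel) {
    // add or update every field that still has a backing column / foreign key
    for (const field of introspected.fields) {
        const existing = original.fields.find((f) => matchesField(f, field));
        if (!existing) {
            (field as any).$container = original;
            original.fields.push(field);
        } else {
            existing.type = field.type; // attributes are reconciled the same way, arg by arg
        }
    }
    // drop fields whose column / foreign key no longer exists in the database
    for (const f of [...original.fields]) {
        if (!introspected.fields.some((d) => matchesField(d, f))) {
            original.fields.splice(original.fields.indexOf(f), 1);
        }
    }
}
// ----------------------------------------------------------------------------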
new Error(`Output path ${options.out} is not a file`); } @@ -185,6 +283,7 @@ async function runPull(options: PullOptions) { const generator = new ZModelCodeGenerator({ //TODO: make configurable quote: 'double', + indent: 2, }); if (options.out) { diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 56fc8545..c857fe16 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,9 +1,15 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; -import { DataFieldFactory, DataModelFactory, EnumFactory } from '@zenstackhq/language/factory'; +import { isEnum, type DataField, type DataModel, type Enum, type Model, Attribute } from '@zenstackhq/language/ast'; +import { + DataFieldFactory, + DataModelFactory, + EnumFactory, + ModelFactory, + DataFieldAttributeFactory, +} from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; -import { getAttributeRef, getDbName } from './utils'; +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider, Cascade } from './provider'; +import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ dbEnums, @@ -111,6 +117,8 @@ export type Relation = { column: string; type: 'one' | 'many'; fk_name: string; + foreign_key_on_update: Cascade; + foreign_key_on_delete: Cascade; nullable: boolean; references: { schema: string | null; @@ -176,6 +184,8 @@ export function syncTable({ column: column.name, type: 'one', fk_name: column.foreign_key_name!, + foreign_key_on_delete: column.foreign_key_on_delete, + foreign_key_on_update: column.foreign_key_on_update, nullable: column.nullable, references: { schema: column.foreign_key_schema, @@ -187,8 +197,9 @@ export function syncTable({ } const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; - const { name: _name, modified } = resolveNameCasing(options, column.name); + const { name: _name, modified: _modified } = resolveNameCasing(options, column.name); const name = `${fieldPrefix}${_name}`; + const modified = fieldPrefix !== '' || _modified; const builtinType = provider.getBuiltinType(column.datatype); @@ -198,21 +209,22 @@ export function syncTable({ typeBuilder.setArray(builtinType.isArray); typeBuilder.setOptional(column.nullable); - if (builtinType.type !== 'Unsupported') { - typeBuilder.setType(builtinType.type); - } else { - typeBuilder.setUnsupported((unsupportedBuilder) => - unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), - ); - } - if (column.options.length > 0) { const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype) as | Enum | undefined; - if (ref) { - typeBuilder.setReference(ref); + if (!ref) { + throw new Error(`Enum ${column.datatype} not found`); + } + typeBuilder.setReference(ref); + } else { + if (builtinType.type !== 'Unsupported') { + typeBuilder.setType(builtinType.type); + } else { + typeBuilder.setUnsupported((unsupportedBuilder) => + unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), + ); } } @@ -220,14 +232,12 @@ export function syncTable({ }); if (column.default) { - const defaultValuesAttrs = column.default - ? 
provider.getDefaultValue({ - fieldName: column.name, - defaultValue: column.default, - services, - enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], - }) - : []; + const defaultValuesAttrs = provider.getDefaultValue({ + fieldName: column.name, + defaultValue: column.default, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }); defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } @@ -235,17 +245,31 @@ export function syncTable({ builder.addAttribute((b) => b.setDecl(idAttribute)); } - if (column.unique) + if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); return b; }); - if (modified) + } + if (modified) { builder.addAttribute((ab) => - ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name), 'name'), + ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name)), ); + } + + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + //TODO: exclude default types like text in postgres + //because Zenstack string = text in postgres so unnecessary to map to default types + if (dbAttr && !['text'].includes(column.datatype)) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (column.length || column.precision) + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! || column.precision!)); + builder.addAttribute(dbAttrFactory); + } return builder; }); @@ -283,6 +307,11 @@ export function syncTable({ return arrayExpr; }), ); + } else { + modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); + modelFactory.comments.push( + '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', + ); } table.indexes.forEach((index) => { @@ -337,8 +366,8 @@ export function syncTable({ ); } } catch (_error: unknown) { - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(modelFactory.node); @@ -350,11 +379,16 @@ export function syncRelation({ model, relation, services, + selfRelation, + simmilarRelations, }: { model: Model; relation: Relation; services: ZModelServices; options: PullOptions; + //self included + simmilarRelations: number; + selfRelation: boolean; }) { const idAttribute = getAttributeRef('@id', services); const uniqueAttribute = getAttributeRef('@unique', services); @@ -362,6 +396,8 @@ export function syncRelation({ const fieldMapAttribute = getAttributeRef('@map', services); const tableMapAttribute = getAttributeRef('@@map', services); + const includeRelationName = selfRelation || simmilarRelations > 1; + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { throw new Error('Cannot find required attributes in the model.'); } @@ -382,11 +418,9 @@ export function syncRelation({ const targetField = targetModel.fields.find((f) => getDbName(f) === relation.references.column); if (!targetField) return; - //TODO: Finish relation sync - const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
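// ----------------------------------------------------------------------------
// Editor's note: effect of the `@db.${datatype}` lookup above, assuming the
// stdlib native-type attributes (@db.VarChar, @db.Uuid, ...) are indexed and a
// PostgreSQL source; the columns are hypothetical. The attribute-name match is
// case-insensitive, and 'text' is skipped because it is already the default
// mapping for String:
//
//   name  varchar(120)  ->  name String @db.VarChar(120)   // length forwarded as the single argument
//   id    uuid          ->  id   String @db.Uuid
//   body  text          ->  body String                    // no native-type attribute added
// ----------------------------------------------------------------------------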
'_' : ''; - const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { @@ -402,14 +436,38 @@ export function syncRelation({ .setArray(relation.type === 'many') .setReference(targetModel), ) - .addAttribute((ab) => - ab - .setDecl(relationAttribute) - .addArg((ab) => ab.StringLiteral.setValue(relationName)) - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') - .addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'), - ); + .addAttribute((ab) => { + ab.setDecl(relationAttribute); + if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); + ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( + (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), + 'references', + ); + + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); + } + + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); + } + + ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + + return ab; + }); sourceModel.fields.push(sourceFieldFactory.node); @@ -427,8 +485,11 @@ export function syncRelation({ .setOptional(relation.references.type === 'one') .setArray(relation.references.type === 'many') .setReference(sourceModel), - ) - .addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName))); + ); + if (includeRelationName) + targetFieldFactory.addAttribute((ab) => + ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName)), + ); targetModel.fields.push(targetFieldFactory.node); } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 73428d37..30bb2602 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,7 +1,7 @@ +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import 
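// ----------------------------------------------------------------------------
// Editor's note: rough shape of what syncRelation() above emits for a single
// hypothetical FK  post.author_id -> user.id  named "post_author_id_fkey" with
// ON DELETE CASCADE / ON UPDATE NO ACTION (field names on the target side and
// the exact casing depend on the --naming option):
//
//   model Post {
//     author_id      Int
//     post_author_id User @relation(fields: [author_id], references: [id], onDelete: Cascade, onUpdate: NoAction, map: "post_author_id_fkey")
//   }
//
//   model User {
//     post Post[]
//   }
//
// The explicit relation name argument is only added for self-relations or when
// several FKs connect the same pair of tables; ON DELETE / ON UPDATE values are
// mapped onto the ReferentialAction enum, with SET NULL left implicit.
// ----------------------------------------------------------------------------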
type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; export const postgresql: IntrospectionProvider = { getBuiltinType(type) { @@ -49,6 +49,7 @@ export const postgresql: IntrospectionProvider = { // dates/times case 'date': + case 'time': case 'timestamp': case 'timestamptz': return { type: 'DateTime', isArray }; @@ -91,24 +92,63 @@ export const postgresql: IntrospectionProvider = { } return factories; } + if (val.startsWith('nextval(')) { + factories.push( + defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), + ); + return factories; + } + if (val.includes('(') && val.includes(')')) { + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + return factories; + } if (val.includes('::')) { - const [enumValue, enumName] = val - .replace(/'|"/g, '') + const [value, type] = val + .replace(/'/g, '') .split('::') .map((s) => s.trim()) as [string, string]; - const enumDef = enums.find((e) => getDbName(e) === enumName); - if (!enumDef) { - return []; - } - const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue); - if (!enumField) { - throw new Error( - `Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`, - ); + switch (type) { + case 'character varying': + case 'uuid': + case 'json': + case 'jsonb': + if (value === 'NULL') return []; + factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(value))); + break; + case 'real': + factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(value))); + break; + default: { + const enumDef = enums.find((e) => getDbName(e, true) === type); + if (!enumDef) { + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + break; + } + const enumField = enumDef.fields.find((v) => getDbName(v) === value); + if (!enumField) { + throw new Error( + `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, + ); + } + + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + break; + } } - factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); return factories; } @@ -161,6 +201,8 @@ SELECT "att"."attname" AS "name", "typ"."typname" AS "datatype", "tns"."nspname" AS "datatype_schema", + "c"."character_maximum_length" AS "length", + COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", "fk_ns"."nspname" AS "foreign_key_schema", "fk_cls"."relname" AS "foreign_key_table", "fk_att"."attname" AS "foreign_key_column", @@ -234,10 +276,18 @@ SELECT ), '[]' ) AS "options" - FROM "pg_catalog"."pg_attribute" AS "att" - INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" - INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" - LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + + FROM "pg_catalog"."pg_attribute" AS "att" + + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + + LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = 
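// ----------------------------------------------------------------------------
// Editor's note: how the getDefaultValue() branches above translate typical
// PostgreSQL column defaults (the examples are hypothetical; the enum case
// assumes the enum type was introspected and can be matched by its database
// name, otherwise it falls back to dbgenerated(...)):
//
//   CURRENT_TIMESTAMP / now()      ->  @default(now())            // plus @updatedAt for updated_at columns
//   nextval('"user_id_seq"')       ->  @default(autoincrement())
//   gen_random_uuid()              ->  @default(dbgenerated("gen_random_uuid()"))
//   'pending'::character varying   ->  @default("pending")
//   'active'::status               ->  @default(active)           // enum member reference when "status" matches
// ----------------------------------------------------------------------------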
"ns"."nspname" + AND "c"."table_name" = "cls"."relname" + AND "c"."column_name" = "att"."attname" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" AND "att"."attnum" = ANY ("pk_con"."conkey") LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index c03c39fc..c04255d1 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -12,6 +12,8 @@ export interface IntrospectedTable { columns: { name: string; datatype: string; + length: number | null; + precision: number | null; datatype_schema: string; foreign_key_schema: string | null; foreign_key_table: string | null; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 160a3096..87d6e058 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -189,6 +189,8 @@ export const sqlite: IntrospectionProvider = { columns.push({ name: c.name, datatype: c.type || '', + length: null, + precision: null, datatype_schema: schema, foreign_key_schema: fk?.foreign_key_schema ?? null, foreign_key_table: fk?.foreign_key_table ?? null, diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 33a6ace3..9f05219c 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,6 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, + StringLiteral, } from '@zenstackhq/language/ast'; import { getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; @@ -61,13 +62,39 @@ export function getDatasource(model: Model) { }; } -export function getDbName(decl: AbstractDeclaration | DataField | EnumField): string { +export function getDbName(decl: AbstractDeclaration | DataField | EnumField, includeSchema: boolean = false): string { if (!('attributes' in decl)) return decl.name; + + const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); + const schemaAttrValue = schemaAttr?.args[0]?.value; + let schema: string; + if (schemaAttrValue?.$type !== 'StringLiteral') schema = 'public'; + if (!schemaAttr) schema = 'public'; + else schema = (schemaAttr.args[0]?.value as any)?.value as string; + + const formatName = (name: string) => `${schema && includeSchema ? 
`${schema}.` : ''}${name}`; + const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map'); - if (!nameAttr) return decl.name; + if (!nameAttr) return formatName(decl.name); const attrValue = nameAttr.args[0]?.value; - if (attrValue?.$type !== 'StringLiteral') return decl.name; + if (attrValue?.$type !== 'StringLiteral') return formatName(decl.name); + + return formatName(attrValue.value); +} + +export function getRelationFkName(decl: DataField): string | undefined { + const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === '@relation'); + const schemaAttrValue = relationAttr?.args.find((a) => a.name === 'map')?.value as StringLiteral; + return schemaAttrValue?.value; +} + +export function getDbSchemaName(decl: DataModel | Enum): string { + const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); + if (!schemaAttr) return 'public'; + const attrValue = schemaAttr.args[0]?.value; + + if (attrValue?.$type !== 'StringLiteral') return 'public'; return attrValue.value; } diff --git a/packages/language/res/stdlib.zmodel b/packages/language/res/stdlib.zmodel index bbeafb07..834c8c6a 100644 --- a/packages/language/res/stdlib.zmodel +++ b/packages/language/res/stdlib.zmodel @@ -120,7 +120,7 @@ function dbgenerated(expr: String?): Any { /** * Checks if the field value contains the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function contains(field: String, search: String, caseInSensitive: Boolean?): Boolean { @@ -135,7 +135,7 @@ function contains(field: String, search: String, caseInSensitive: Boolean?): Boo /** * Checks the field value starts with the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function startsWith(field: String, search: String, caseInSensitive: Boolean?): Boolean { @@ -144,7 +144,7 @@ function startsWith(field: String, search: String, caseInSensitive: Boolean?): B /** * Checks if the field value ends with the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function endsWith(field: String, search: String, caseInSensitive: Boolean?): Boolean { diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 91e1dc7c..257aa62b 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -100,7 +100,10 @@ export class ZModelCodeGenerator { @gen(Model) private _generateModel(ast: Model) { - return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? 
'\n\n' : ''}${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; + return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? '\n\n' : ''}${ast.declarations + .sort((d) => (d.$type === 'Enum' ? 1 : 0)) + .map((d) => this.generate(d)) + .join('\n\n')}`; } @gen(DataSource) From 4a9158c21d987082aed329419f500cb693ddcab0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:00:50 +0200 Subject: [PATCH 17/29] fix: make ignore behave it does in prisma with no index models --- packages/language/src/validators/datamodel-validator.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/language/src/validators/datamodel-validator.ts b/packages/language/src/validators/datamodel-validator.ts index f3b9f25c..6ec74717 100644 --- a/packages/language/src/validators/datamodel-validator.ts +++ b/packages/language/src/validators/datamodel-validator.ts @@ -48,12 +48,14 @@ export default class DataModelValidator implements AstValidator { const uniqueFields = allFields.filter((f) => f.attributes.find((attr) => attr.decl.ref?.name === '@unique')); const modelLevelIds = getModelIdFields(dm); const modelUniqueFields = getModelUniqueFields(dm); + const ignore = hasAttribute(dm, '@@ignore'); if ( idFields.length === 0 && modelLevelIds.length === 0 && uniqueFields.length === 0 && - modelUniqueFields.length === 0 + modelUniqueFields.length === 0 && + !ignore ) { accept( 'error', From c7809b3d3bd8e9942ee3d85c28f86c491d832d2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:21:31 +0200 Subject: [PATCH 18/29] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index c857fe16..1939b9fc 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,14 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { isEnum, type DataField, type DataModel, type Enum, type Model, Attribute } from '@zenstackhq/language/ast'; +import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; import { - DataFieldFactory, - DataModelFactory, - EnumFactory, - ModelFactory, - DataFieldAttributeFactory, + DataFieldAttributeFactory, + DataFieldFactory, + DataModelFactory, + EnumFactory } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider, Cascade } from './provider'; +import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ From b794a66627c11eeeca6d9a7e1cffbc17fa27da2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:47:06 +0200 Subject: [PATCH 19/29] feat: make all format options configurable --- packages/cli/src/actions/db.ts | 14 +++++----- packages/cli/src/actions/pull/index.ts | 36 ++++++++++++-------------- packages/cli/src/actions/pull/utils.ts | 2 +- packages/cli/src/index.ts | 17 ++++++++++++ 4 files changed, 43 insertions(+), 26 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 8da32319..e48beb23 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ 
-17,10 +17,13 @@ type PushOptions = { export type PullOptions = { schema?: string; - out?: string; - naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; - alwaysMap?: boolean; excludeSchemas?: string[]; + out?: string; + modelCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + fieldCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + alwaysMap: boolean; + quote: 'single' | 'double'; + indent: number; }; /** @@ -281,9 +284,8 @@ async function runPull(options: PullOptions) { } const generator = new ZModelCodeGenerator({ - //TODO: make configurable - quote: 'double', - indent: 2, + quote: options.quote, + indent: options.indent, }); if (options.out) { diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 1939b9fc..4bc7dd14 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,10 +1,10 @@ import type { ZModelServices } from '@zenstackhq/language'; import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; import { - DataFieldAttributeFactory, - DataFieldFactory, - DataModelFactory, - EnumFactory + DataFieldAttributeFactory, + DataFieldFactory, + DataModelFactory, + EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -22,10 +22,10 @@ export function syncEnums({ options: PullOptions; }) { for (const dbEnum of dbEnums) { - const { modified, name } = resolveNameCasing(options, dbEnum.enum_type); + const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); const factory = new EnumFactory().setName(name); - if (modified) + if (modified || options.alwaysMap) factory.addAttribute((builder) => builder .setDecl(getAttributeRef('@@map', services)) @@ -33,10 +33,10 @@ export function syncEnums({ ); dbEnum.values.forEach((v) => { - const { name, modified } = resolveNameCasing(options, v); + const { name, modified } = resolveNameCasing(options.fieldCasing, v); factory.addField((builder) => { builder.setName(name); - if (modified) + if (modified || options.alwaysMap) builder.addAttribute((builder) => builder .setDecl(getAttributeRef('@map', services)) @@ -64,10 +64,11 @@ export function syncEnums({ } } -function resolveNameCasing(options: PullOptions, originalName: string) { +function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none', originalName: string) { let name = originalName; + const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? '_' : ''; - switch (options.naming) { + switch (casing) { case 'pascal': name = toPascalCase(originalName); break; @@ -83,8 +84,8 @@ function resolveNameCasing(options: PullOptions, originalName: string) { } return { - modified: options.alwaysMap ? 
true : name !== originalName, - name, + modified: name !== originalName || fieldPrefix !== '', + name: `${fieldPrefix}${name}`, }; } @@ -163,13 +164,13 @@ export function syncTable({ } const relations: Relation[] = []; - const { name, modified } = resolveNameCasing({ ...options, naming: 'pascal' }, table.name); + const { name, modified } = resolveNameCasing(options.modelCasing, table.name); const multiPk = table.columns.filter((c) => c.pk).length > 1; const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); modelFactory.setContainer(model); - if (modified) { + if (modified || options.alwaysMap) { modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); @@ -195,10 +196,7 @@ export function syncTable({ }); } - const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; - const { name: _name, modified: _modified } = resolveNameCasing(options, column.name); - const name = `${fieldPrefix}${_name}`; - const modified = fieldPrefix !== '' || _modified; + const { name, modified } = resolveNameCasing(options.fieldCasing, column.name); const builtinType = provider.getBuiltinType(column.datatype); @@ -252,7 +250,7 @@ export function syncTable({ return b; }); } - if (modified) { + if (modified || options.alwaysMap) { builder.addAttribute((ab) => ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name)), ); diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 9f05219c..2b3b9b40 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,7 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, - StringLiteral, + type StringLiteral, } from '@zenstackhq/language/ast'; import { getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 0774a4e3..8c0e54a3 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -144,6 +144,23 @@ function createProgram() { .addOption(noVersionCheckOption) .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) + .addOption( + new Option('--model-casing ', 'set the casing of generated models').default( + 'none', + ), + ) + .addOption( + new Option('--field-casing ', 'set the casing of generated fields').default( + 'none', + ), + ) + .addOption( + new Option('--always-map', 'always add @map and @@map attributes to models and fields').default(false), + ) + .addOption( + new Option('--quote ', 'set the quote style of generated schema files').default('double'), + ) + .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(2)) .action((options) => dbAction('pull', options)); dbCommand From 419515e6a0ca1dd305ff9300b86fcbdb422f087b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:55:41 +0200 Subject: [PATCH 20/29] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4bc7dd14..81a7e56e 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -55,7 +55,8 @@ export 
function syncEnums({ .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), ); } - } catch (_error: unknown) { + } catch (error: any) { + if (error?.message !== `Declaration not found: @@schema`) throw error; //Waiting to support multi-schema //TODO: remove catch after multi-schema support is implemented } @@ -362,7 +363,8 @@ export function syncTable({ b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), ); } - } catch (_error: unknown) { + } catch (error: any) { + if (error?.message !== `Declaration not found: @@schema`) throw error; //Waiting to support multi-schema //TODO: remove catch after multi-schema support is implemented } From d9cd916ea07cd9dfc118675f7e424f5e2a1030b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 02:53:37 +0200 Subject: [PATCH 21/29] feat: Handle the database type mapping --- packages/cli/src/actions/pull/index.ts | 23 ++++++++++++++--- .../src/actions/pull/provider/postgresql.ts | 25 ++++++++++++++++++- .../cli/src/actions/pull/provider/provider.ts | 1 + .../cli/src/actions/pull/provider/sqlite.ts | 24 ++++++++++++++++++ 4 files changed, 68 insertions(+), 5 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 81a7e56e..75f54b44 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,5 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; +import { + Attribute, + isEnum, + type DataField, + type DataModel, + type Enum, + type Model, + type BuiltinType, +} from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory, DataFieldFactory, @@ -260,9 +268,16 @@ export function syncTable({ const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, )?.node as Attribute | undefined; - //TODO: exclude default types like text in postgres - //because Zenstack string = text in postgres so unnecessary to map to default types - if (dbAttr && !['text'].includes(column.datatype)) { + + const defaultDatabaseType = provider.getDefaultDatabaseType(builtinType.type as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== column.datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (column.length || column.precision))) + ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (column.length || column.precision) dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! 
|| column.precision!)); diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 30bb2602..8e1457a3 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -2,6 +2,7 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import type { BuiltinType } from '@zenstackhq/language/ast'; export const postgresql: IntrospectionProvider = { getBuiltinType(type) { @@ -78,6 +79,28 @@ export const postgresql: IntrospectionProvider = { tables, }; }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'text' }; + case 'Boolean': + return { type: 'boolean' }; + case 'Int': + return { type: 'integer' }; + case 'BigInt': + return { type: 'bigint' }; + case 'Float': + return { type: 'double precision' }; + case 'Decimal': + return { type: 'decimal' }; + case 'DateTime': + return { type: 'timestamp', precisition: 3 }; + case 'Json': + return { type: 'jsonb' }; + case 'Bytes': + return { type: 'bytea' }; + } + }, getDefaultValue({ defaultValue, fieldName, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; @@ -276,7 +299,7 @@ SELECT ), '[]' ) AS "options" - + FROM "pg_catalog"."pg_attribute" AS "att" INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index c04255d1..0bdc2d67 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -64,6 +64,7 @@ export interface IntrospectionProvider { type: BuiltinType | 'Unsupported'; isArray: boolean; }; + getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; getDefaultValue(args: { fieldName: string; defaultValue: string; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 87d6e058..5c339760 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,3 +1,4 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid @@ -71,6 +72,29 @@ export const sqlite: IntrospectionProvider = { } }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'TEXT' }; + case 'Boolean': + return { type: 'INTEGER' }; + case 'Int': + return { type: 'INTEGER' }; + case 'BigInt': + return { type: 'INTEGER' }; + case 'Float': + return { type: 'REAL' }; + case 'Decimal': + return { type: 'DECIMAL' }; + case 'DateTime': + return { type: 'NUMERIC' }; + case 'Json': + return { type: 'JSONB' }; + case 'Bytes': + return { type: 'BLOB' }; + } + }, + async introspect(connectionString: string): Promise { const SQLite = (await import('better-sqlite3')).default; const db = new SQLite(connectionString, { readonly: true }); From 3384f8b3ca6f3ec535c2677ed0cbd467b389296d Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 12 Nov 2025 21:43:46 +0100 Subject: [PATCH 22/29] fix: catch up with feature updates - improve code styling - enable schema support for db pull --- packages/cli/package.json | 1 + packages/cli/src/actions/action-utils.ts | 39 ++++++++++--------- packages/cli/src/actions/db.ts | 24 +++++++----- packages/cli/src/actions/pull/index.ts | 38 +++++++----------- .../cli/src/actions/pull/provider/index.ts | 4 +- packages/cli/src/actions/pull/utils.ts | 18 ++++++++- packages/cli/src/index.ts | 1 - pnpm-lock.yaml | 3 ++ 8 files changed, 74 insertions(+), 54 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index 1b5f9454..3eeda469 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -31,6 +31,7 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", + "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/sdk": "workspace:*", "colors": "1.4.0", diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index 32e38fe2..7bf0350c 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -1,5 +1,5 @@ -import { loadDocument } from '@zenstackhq/language'; -import { isDataSource } from '@zenstackhq/language/ast'; +import { type ZModelServices, loadDocument } from '@zenstackhq/language'; +import { type Model, isDataSource } from '@zenstackhq/language/ast'; import { PrismaSchemaGenerator } from '@zenstackhq/sdk'; import colors from 'colors'; import fs from 'node:fs'; @@ -41,22 +41,22 @@ export function getSchemaFile(file?: string) { } } -export async function loadSchemaDocument(schemaFile: string) { - const loadResult = await loadDocument(schemaFile); - if (!loadResult.success) { - loadResult.errors.forEach((err) => { - console.error(colors.red(err)); - }); - throw new CliError('Schema contains errors. 
See above for details.'); - } - loadResult.warnings.forEach((warn) => { - console.warn(colors.yellow(warn)); - }); - return loadResult.model; -} +export async function loadSchemaDocument( + schemaFile: string, + opts?: { keepImports?: boolean; returnServices?: false }, +): Promise; +export async function loadSchemaDocument( + schemaFile: string, + opts: { returnServices: true; keepImports?: boolean }, +): Promise<{ model: Model; services: ZModelServices }>; +export async function loadSchemaDocument( + schemaFile: string, + opts: { returnServices?: boolean; keepImports?: boolean } = {}, +) { + const returnServices = opts.returnServices || false; + const keepImports = opts.keepImports || false; -export async function loadSchemaDocumentWithServices(schemaFile: string) { - const loadResult = await loadDocument(schemaFile, [], true); + const loadResult = await loadDocument(schemaFile, [], keepImports); if (!loadResult.success) { loadResult.errors.forEach((err) => { console.error(colors.red(err)); @@ -66,7 +66,10 @@ export async function loadSchemaDocumentWithServices(schemaFile: string) { loadResult.warnings.forEach((warn) => { console.warn(colors.yellow(warn)); }); - return { services: loadResult.services, model: loadResult.model }; + + if (returnServices) return { model: loadResult.model, services: loadResult.services }; + + return loadResult.model; } export function handleSubProcessError(err: unknown) { diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e48beb23..77f758ea 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,6 @@ -import { Model, Enum, DataModel, DataField } from '@zenstackhq/language/ast'; -import { ZModelCodeGenerator } from '@zenstackhq/sdk'; +import { config } from '@dotenvx/dotenvx'; +import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { type DataField, DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import fs from 'node:fs'; import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; @@ -7,7 +8,6 @@ import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, require import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; -import { config } from '@dotenvx/dotenvx'; type PushOptions = { schema?: string; @@ -17,7 +17,6 @@ type PushOptions = { export type PullOptions = { schema?: string; - excludeSchemas?: string[]; out?: string; modelCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; fieldCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; @@ -74,7 +73,7 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { try { const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); config(); const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; const datasource = getDatasource(model); @@ -94,8 +93,8 @@ async function runPull(options: PullOptions) { } const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); - const enums = allEnums.filter((e) => !options.excludeSchemas?.includes(e.schema_name)); - const tables = allTables.filter((t) => !options.excludeSchemas?.includes(t.schema)); + const enums = allEnums.filter((e) => datasource.schemas.includes(e.schema_name)); + 
const tables = allTables.filter((t) => datasource.schemas.includes(t.schema)); const newModel: Model = { $type: 'Model', @@ -106,11 +105,18 @@ async function runPull(options: PullOptions) { imports: [], }; - syncEnums({ dbEnums: enums, model: newModel, services, options }); + syncEnums({ dbEnums: enums, model: newModel, services, options, defaultSchema: datasource.defaultSchema }); const resolvedRelations: Relation[] = []; for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services, options }); + const relations = syncTable({ + table, + model: newModel, + provider, + services, + options, + defaultSchema: datasource.defaultSchema, + }); resolvedRelations.push(...relations); } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 75f54b44..19d86a22 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,7 +1,7 @@ import type { ZModelServices } from '@zenstackhq/language'; import { - Attribute, isEnum, + type Attribute, type DataField, type DataModel, type Enum, @@ -23,11 +23,13 @@ export function syncEnums({ model, options, services, + defaultSchema, }: { dbEnums: IntrospectedEnum[]; model: Model; services: ZModelServices; options: PullOptions; + defaultSchema: string; }) { for (const dbEnum of dbEnums) { const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); @@ -55,18 +57,12 @@ export function syncEnums({ }); }); - try { - if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== 'public') { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); - } - } catch (error: any) { - if (error?.message !== `Declaration not found: @@schema`) throw error; - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); } model.declarations.push(factory.get({ $container: model })); @@ -143,12 +139,14 @@ export function syncTable({ table, services, options, + defaultSchema, }: { table: IntrospectedTable; model: Model; provider: IntrospectionProvider; services: ZModelServices; options: PullOptions; + defaultSchema: string; }) { const idAttribute = getAttributeRef('@id', services); const modelIdAttribute = getAttributeRef('@@id', services); @@ -372,16 +370,10 @@ export function syncTable({ ); }); - try { - if (table.schema && table.schema !== '' && table.schema !== 'public') { - modelFactory.addAttribute((b) => - b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), - ); - } - } catch (error: any) { - if (error?.message !== `Declaration not found: @@schema`) throw error; - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); } model.declarations.push(modelFactory.node); diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index 4c9a0fe8..e712ac98 100644 --- 
a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,9 +1,11 @@ +import type { DataSourceProviderType } from '@zenstackhq/schema'; export * from './provider'; import { postgresql } from './postgresql'; +import type { IntrospectionProvider } from './provider'; import { sqlite } from './sqlite'; -export const providers = { +export const providers: Record = { postgresql, sqlite, }; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 2b3b9b40..05aa31ac 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -11,8 +11,8 @@ import { type Model, type StringLiteral, } from '@zenstackhq/language/ast'; -import { getStringLiteral } from '@zenstackhq/language/utils'; -import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; +import { getLiteralArray, getStringLiteral } from '@zenstackhq/language/utils'; +import type { DataSourceProviderType } from '@zenstackhq/schema'; import type { Reference } from 'langium'; export function getAttribute(model: Model, attrName: string) { @@ -53,12 +53,26 @@ export function getDatasource(model: Model) { throw new Error('The url field must be a string literal or an env().'); } + const defaultSchemaField = datasource.fields.find((f) => f.name === 'defaultSchema'); + const defaultSchema = (defaultSchemaField && getStringLiteral(defaultSchemaField.value)) || 'public'; + + const schemasField = datasource.fields.find((f) => f.name === 'schemas'); + const schemas = + (schemasField && + getLiteralArray(schemasField.value) + ?.map(getStringLiteral) + .filter((s) => s !== undefined)) || + []; + return { name: datasource.name, provider: getStringLiteral( datasource.fields.find((f) => f.name === 'provider')?.value, ) as DataSourceProviderType, url, + defaultSchema, + schemas, + allSchemas: [defaultSchema, ...schemas], }; } diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 8c0e54a3..97cc89dd 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -142,7 +142,6 @@ function createProgram() { .description('Introspect your database.') .addOption(schemaOption) .addOption(noVersionCheckOption) - .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .addOption( new Option('--model-casing ', 'set the casing of generated models').default( diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1236121a..abfe277a 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -162,6 +162,9 @@ importers: '@zenstackhq/language': specifier: workspace:* version: link:../language + '@zenstackhq/schema': + specifier: workspace:* + version: link:../schema '@zenstackhq/sdk': specifier: workspace:* version: link:../sdk From c3f8357e6add4febb791c61a850d97f88d25e56a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 01:11:08 +0100 Subject: [PATCH 23/29] fix: add sqlite e2e test and fix some bugs --- packages/cli/package.json | 5 +- packages/cli/src/actions/db.ts | 178 +++++++++--------- packages/cli/src/actions/pull/index.ts | 159 +++++++++------- .../src/actions/pull/provider/postgresql.ts | 10 +- .../cli/src/actions/pull/provider/provider.ts | 3 + .../cli/src/actions/pull/provider/sqlite.ts | 137 +++++++------- packages/cli/src/actions/pull/utils.ts | 5 + packages/cli/src/index.ts | 2 +- packages/cli/src/test.ts | 9 + 
packages/cli/test/db/pull.test.ts | 96 ++++++++++
 packages/cli/test/db/push.test.ts | 18 ++
 pnpm-lock.yaml | 145 +++++++-------
 12 files changed, 470 insertions(+), 297 deletions(-)
 create mode 100644 packages/cli/src/test.ts
 create mode 100644 packages/cli/test/db/pull.test.ts
 create mode 100644 packages/cli/test/db/push.test.ts

diff --git a/packages/cli/package.json b/packages/cli/package.json
index 3eeda469..0e0c2444 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -31,8 +31,8 @@
     "dependencies": {
         "@dotenvx/dotenvx": "^1.51.0",
         "@zenstackhq/common-helpers": "workspace:*",
-        "@zenstackhq/schema": "workspace:*",
         "@zenstackhq/language": "workspace:*",
+        "@zenstackhq/schema": "workspace:*",
         "@zenstackhq/sdk": "workspace:*",
         "colors": "1.4.0",
         "commander": "^8.3.0",
@@ -44,7 +44,8 @@
         "prisma": "catalog:",
         "semver": "^7.7.2",
-        "ts-pattern": "catalog:"
+        "ts-pattern": "catalog:",
+        "vscode-uri": "^3.1.0"
     },
     "devDependencies": {
         "@types/better-sqlite3": "catalog:",
diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts
index 77f758ea..6ea1bc08 100644
--- a/packages/cli/src/actions/db.ts
+++ b/packages/cli/src/actions/db.ts
@@ -74,10 +74,11 @@ async function runPull(options: PullOptions) {
     try {
         const schemaFile = getSchemaFile(options.schema);
         const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true });
-        config();
+        config({
+            ignore: ['MISSING_ENV_FILE'],
+        });
         const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'];
         const datasource = getDatasource(model);
-
         if (!datasource) {
             throw new Error('No datasource found in the schema.');
         }
@@ -91,10 +92,14 @@ async function runPull(options: PullOptions) {
         if (!provider) {
            throw new Error(`No introspection provider found for: ${datasource.provider}`);
         }
-
+        console.log('Starting to introspect the database...');
         const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url);
-        const enums = allEnums.filter((e) => datasource.schemas.includes(e.schema_name));
-        const tables = allTables.filter((t) => datasource.schemas.includes(t.schema));
+        const enums = provider.isSupportedFeature('Schema')
+            ? allEnums.filter((e) => datasource.schemas.includes(e.schema_name))
+            : allEnums;
+        const tables = provider.isSupportedFeature('Schema')
+            ? 
allTables.filter((t) => datasource.schemas.includes(t.schema)) + : allTables; const newModel: Model = { $type: 'Model', @@ -104,8 +109,15 @@ async function runPull(options: PullOptions) { declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], imports: [], }; - - syncEnums({ dbEnums: enums, model: newModel, services, options, defaultSchema: datasource.defaultSchema }); + syncEnums({ + dbEnums: enums, + model: newModel, + services, + options, + defaultSchema: datasource.defaultSchema, + oldModel: model, + provider, + }); const resolvedRelations: Relation[] = []; for (const table of tables) { @@ -116,21 +128,23 @@ async function runPull(options: PullOptions) { services, options, defaultSchema: datasource.defaultSchema, + oldModel: model, }); resolvedRelations.push(...relations); } - + // sync relation fields for (const relation of resolvedRelations) { const simmilarRelations = resolvedRelations.filter((rr) => { return ( - (rr.schema === relation.schema && + rr !== relation && + ((rr.schema === relation.schema && rr.table === relation.table && rr.references.schema === relation.references.schema && rr.references.table === relation.references.table) || - (rr.schema === relation.references.schema && - rr.column === relation.references.column && - rr.references.schema === relation.schema && - rr.references.table === relation.table) + (rr.schema === relation.references.schema && + rr.column === relation.references.column && + rr.references.schema === relation.schema && + rr.references.table === relation.table)) ); }).length; const selfRelation = @@ -151,6 +165,7 @@ async function runPull(options: PullOptions) { .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); + //Delete models services.shared.workspace.IndexManager.allElements('DataModel', docsSet) .filter( (declaration) => @@ -162,32 +177,35 @@ async function runPull(options: PullOptions) { model.declarations.splice(index, 1); console.log(`Delete model ${decl.name}`); }); - services.shared.workspace.IndexManager.allElements('Enum', docsSet) - .filter( - (declaration) => - !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); - }); + // Delete Enums + if (provider.isSupportedFeature('NativeEnum')) + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + // newModel.declarations .filter((d) => [DataModel, Enum].includes(d.$type)) .forEach((_declaration) => { - const declaration = _declaration as DataModel | Enum; + const newDataModel = _declaration as DataModel | Enum; const declarations = services.shared.workspace.IndexManager.allElements( - declaration.$type, + newDataModel.$type, docsSet, ).toArray(); - const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) + const originalDataModel = declarations.find((d) => getDbName(d.node as any) === getDbName(newDataModel)) ?.node as DataModel | Enum | undefined; - if (!originalModel) { - 
model.declarations.push(declaration); - (declaration as any).$container = model; - declaration.fields.forEach((f) => { + if (!originalDataModel) { + model.declarations.push(newDataModel); + (newDataModel as any).$container = model; + newDataModel.fields.forEach((f) => { if (f.$type === 'DataField' && f.type.reference?.ref) { const ref = declarations.find( (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), @@ -198,19 +216,33 @@ async function runPull(options: PullOptions) { return; } - declaration.fields.forEach((f) => { - const originalField = originalModel.fields.find( - (d) => + newDataModel.fields.forEach((f) => { + const originalFields = originalDataModel.fields.filter((d) => { + return ( getDbName(d) === getDbName(f) || (getRelationFkName(d as any) === getRelationFkName(f as any) && !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)), - ); + !!getRelationFkName(f as any)) || + (f.$type === 'DataField' && + d.$type === 'DataField' && + f.type.reference?.ref && + d.type.reference?.ref && + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) + ); + }); + if (originalFields.length > 1) { + console.warn( + `Found more original fields, need to tweak the search algorith. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ); + return; + } + const originalField = originalFields.at(0); + Object.freeze(originalField); if (!originalField) { - //console.log(`Added field ${f.name} to ${originalModel.name}`); - (f as any).$container = originalModel; - originalModel.fields.push(f as any); + console.log(`Added field ${f.name} to ${originalDataModel.name}`); + (f as any).$container = originalDataModel; + originalDataModel.fields.push(f as any); if (f.$type === 'DataField' && f.type.reference?.ref) { const ref = declarations.find( (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), @@ -222,66 +254,42 @@ async function runPull(options: PullOptions) { } return; } - - if (originalField.$type === 'DataField') { - const field = f as DataField; - originalField.type = field.type; - if (field.type.reference) { - const ref = declarations.find( - (d) => getDbName(d.node as any) === getDbName(field.type.reference!.ref as any), - )?.node as DataModel | undefined; - if (ref) { - (field.type.reference.$refText as any) = ref.name; - (field.type.reference.ref as any) = ref; - } - } - - (originalField.type.$container as any) = originalField; - } - - f.attributes.forEach((attr) => { - const originalAttribute = originalField.attributes.find( - (d) => d.decl.$refText === attr.decl.$refText, - ); - - if (!originalAttribute) { - //console.log(`Added Attribute ${attr.decl.$refText} to ${f.name}`); - (f as any).$container = originalField; - originalField.attributes.push(attr as any); - return; - } - - originalAttribute.args = attr.args; - attr.args.forEach((a) => { - (a.$container as any) = originalAttribute; - }); - }); - + if (f.name === 'profiles') console.log(f.attributes.length); originalField.attributes - .filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText)) + .filter( + (attr) => + !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && + !['@map', '@@map', '@default', '@updatedAt'].includes(attr.decl.$refText), + ) .forEach((attr) => { const field = attr.$container; const index = field.attributes.findIndex((d) => d === attr); field.attributes.splice(index, 1); - //console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + 
console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); }); }); - originalModel.fields + originalDataModel.fields .filter( (f) => - !declaration.fields.find( - (d) => + !newDataModel.fields.find((d) => { + return ( getDbName(d) === getDbName(f) || (getRelationFkName(d as any) === getRelationFkName(f as any) && !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)), - ), + !!getRelationFkName(f as any)) || + (f.$type === 'DataField' && + d.$type === 'DataField' && + f.type.reference?.ref && + d.type.reference?.ref && + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) + ); + }), ) .forEach((f) => { - const model = f.$container; - const index = model.fields.findIndex((d) => d === f); - model.fields.splice(index, 1); - //console.log(`Delete field ${f.name}`); + const _model = f.$container; + const index = _model.fields.findIndex((d) => d === f); + _model.fields.splice(index, 1); + console.log(`Delete field ${f.name}`); }); }); diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 19d86a22..60c19204 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -15,57 +15,73 @@ import { EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; +import { type Cascade, type IntrospectedEnum, type IntrospectedTable, type IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ dbEnums, model, + oldModel, + provider, options, services, defaultSchema, }: { dbEnums: IntrospectedEnum[]; model: Model; + oldModel: Model; + provider: IntrospectionProvider; services: ZModelServices; options: PullOptions; defaultSchema: string; }) { - for (const dbEnum of dbEnums) { - const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); - if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); - const factory = new EnumFactory().setName(name); - if (modified || options.alwaysMap) - factory.addAttribute((builder) => - builder - .setDecl(getAttributeRef('@@map', services)) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), - ); + if (provider.isSupportedFeature('NativeEnum')) { + for (const dbEnum of dbEnums) { + const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); + if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + const factory = new EnumFactory().setName(name); + if (modified || options.alwaysMap) + factory.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@@map', services)) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), + ); - dbEnum.values.forEach((v) => { - const { name, modified } = resolveNameCasing(options.fieldCasing, v); - factory.addField((builder) => { - builder.setName(name); - if (modified || options.alwaysMap) - builder.addAttribute((builder) => - builder - .setDecl(getAttributeRef('@map', services)) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), - ); - - return builder; + dbEnum.values.forEach((v) => { + const { name, modified } = resolveNameCasing(options.fieldCasing, v); + factory.addField((builder) => { + builder.setName(name); + if (modified || options.alwaysMap) + builder.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@map', services)) + 
.addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), + ); + + return builder; + }); }); - }); - if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); - } + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } - model.declarations.push(factory.get({ $container: model })); + model.declarations.push(factory.get({ $container: model })); + } + } else { + oldModel.declarations + .filter((d) => isEnum(d)) + .forEach((d) => { + const factory = new EnumFactory().setName(d.name); + d.fields.forEach((v) => { + factory.addField((builder) => builder.setName(v.name)); + }); + model.declarations.push(factory.get({ $container: model })); + }); } } @@ -140,9 +156,11 @@ export function syncTable({ services, options, defaultSchema, + oldModel, }: { table: IntrospectedTable; model: Model; + oldModel: Model; provider: IntrospectionProvider; services: ZModelServices; options: PullOptions; @@ -182,7 +200,6 @@ export function syncTable({ builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); } - table.columns.forEach((column) => { if (column.foreign_key_table) { relations.push({ @@ -369,7 +386,6 @@ export function syncTable({ .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), ); }); - if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { modelFactory.addAttribute((b) => b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), @@ -377,7 +393,6 @@ export function syncTable({ } model.declarations.push(modelFactory.node); - return relations; } @@ -427,7 +442,10 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; - let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + let sourceFieldName = + simmilarRelations > 0 + ? 
`${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + : targetModel.name; if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; @@ -441,47 +459,47 @@ export function syncRelation({ .setOptional(relation.nullable) .setArray(relation.type === 'many') .setReference(targetModel), - ) - .addAttribute((ab) => { - ab.setDecl(relationAttribute); - if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); - ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( - (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), - 'references', - ); + ); + sourceFieldFactory.addAttribute((ab) => { + ab.setDecl(relationAttribute); + if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); + ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( + (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), + 'references', + ); - if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { - const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); - const enumFieldRef = enumRef.fields.find( - (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), - ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); - ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); - } + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); + } - if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { - const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); - const enumFieldRef = enumRef.fields.find( - (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), - ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); - ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); - } + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); + } - ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + if (relation.fk_name) ab.addArg((ab) => 
ab.StringLiteral.setValue(relation.fk_name), 'map'); - return ab; - }); + return ab; + }); sourceModel.fields.push(sourceFieldFactory.node); const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const oppositeFieldName = - relation.type === 'one' - ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` - : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + simmilarRelations > 0 + ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + : sourceModel.name; const targetFieldFactory = new DataFieldFactory() .setContainer(targetModel) @@ -498,4 +516,9 @@ export function syncRelation({ ); targetModel.fields.push(targetFieldFactory.node); + + targetModel.fields.sort((a, b) => { + if (a.type.reference && b.type.reference) return 0; + return a.name.localeCompare(b.name); + }); } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 8e1457a3..958b0930 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,10 +1,18 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import type { BuiltinType } from '@zenstackhq/language/ast'; export const postgresql: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'Schema': + return true; + default: + return false; + } + }, getBuiltinType(type) { const t = (type || '').toLowerCase(); diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 0bdc2d67..252a8a30 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -58,6 +58,8 @@ export type IntrospectedSchema = { enums: IntrospectedEnum[]; }; +export type DatabaseFeature = 'Schema' | 'NativeEnum'; + export interface IntrospectionProvider { introspect(connectionString: string): Promise; getBuiltinType(type: string): { @@ -71,4 +73,5 @@ export interface IntrospectionProvider { services: ZModelServices; enums: Enum[]; }): DataFieldAttributeFactory[]; + isSupportedFeature(feature: DatabaseFeature): boolean; } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 5c339760..e940b359 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,98 +1,52 @@ import type { BuiltinType } from '@zenstackhq/language/ast'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. 
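 // The provider below maps SQLite's declared column types to ZModel builtin types
 // (e.g. INTEGER -> Int, TEXT -> String, REAL -> Float) and turns recognizable column
 // defaults (CURRENT_TIMESTAMP, booleans, numbers, quoted strings) into @default attributes.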
export const sqlite: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'Schema': + case 'NativeEnum': + default: + return false; + } + }, getBuiltinType(type) { const t = (type || '').toLowerCase().trim(); - // SQLite has no array types const isArray = false; - switch (t) { - // integers - case 'int': case 'integer': - case 'tinyint': - case 'smallint': - case 'mediumint': return { type: 'Int', isArray }; + case 'text': + return { type: 'String', isArray }; case 'bigint': return { type: 'BigInt', isArray }; - - // decimals and floats + case 'blob': + return { type: 'Bytes', isArray }; + case 'real': + return { type: 'Float', isArray }; case 'numeric': case 'decimal': return { type: 'Decimal', isArray }; - case 'real': - case 'double': - case 'double precision': - case 'float': - return { type: 'Float', isArray }; - - // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) - case 'bool': - case 'boolean': - return { type: 'Boolean', isArray }; - - // strings - case 'text': - case 'varchar': - case 'character varying': - case 'char': - case 'character': - case 'clob': - case 'uuid': // often stored as TEXT - return { type: 'String', isArray }; - - // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) - case 'date': case 'datetime': return { type: 'DateTime', isArray }; - - // binary - case 'blob': - return { type: 'Bytes', isArray }; - - // json (not a native type, but commonly used) - case 'json': + case 'jsonb': return { type: 'Json', isArray }; - + case 'boolean': + return { type: 'Boolean', isArray }; default: { - // Fallbacks based on SQLite type affinity rules - if (t.includes('int')) return { type: 'Int', isArray }; - if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray }; - if (t.includes('blob')) return { type: 'Bytes', isArray }; - if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray }; - if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray }; return { type: 'Unsupported' as const, isArray }; } } }, - getDefaultDatabaseType(type: BuiltinType) { - switch (type) { - case 'String': - return { type: 'TEXT' }; - case 'Boolean': - return { type: 'INTEGER' }; - case 'Int': - return { type: 'INTEGER' }; - case 'BigInt': - return { type: 'INTEGER' }; - case 'Float': - return { type: 'REAL' }; - case 'Decimal': - return { type: 'DECIMAL' }; - case 'DateTime': - return { type: 'NUMERIC' }; - case 'Json': - return { type: 'JSONB' }; - case 'Bytes': - return { type: 'BLOB' }; - } + getDefaultDatabaseType() { + return undefined; }, async introspect(connectionString: string): Promise { @@ -114,7 +68,7 @@ export const sqlite: IntrospectionProvider = { for (const t of tablesRaw) { const tableName = t.name; - const schema = 'main'; + const schema = ''; // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) const columnsInfo = all<{ @@ -193,7 +147,7 @@ export const sqlite: IntrospectionProvider = { for (const fk of fkRows) { fkByColumn.set(fk.from, { - foreign_key_schema: 'main', + foreign_key_schema: '', foreign_key_table: fk.table || null, foreign_key_column: fk.to || null, foreign_key_name: null, @@ -228,7 +182,7 @@ export const sqlite: IntrospectionProvider = { default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), - unique_name: uniqueSingleColumn.has(c.name) ? 
`${tableName}_${c.name}_unique` : null, + unique_name: null, }); } @@ -243,7 +197,46 @@ export const sqlite: IntrospectionProvider = { } }, - getDefaultValue(_args) { - throw new Error('Not implemented yet for SQLite'); + getDefaultValue({ defaultValue, fieldName, services, enums }) { + const val = defaultValue.trim(); + const factories: DataFieldAttributeFactory[] = []; + + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + return factories; + } + + if (val === 'true' || val === 'false') { + factories.push(defaultAttr.addArg((a) => a.BooleanLiteral.setValue(val === 'true'))); + return factories; + } + + if (!Number.isNaN(parseFloat(val)) || !Number.isNaN(parseInt(val))) { + factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(val))); + return factories; + } + + if (val.startsWith("'") && val.endsWith("'")) { + const strippedName = val.slice(1, -1); + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName); + if (enumField) factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + } else { + factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(strippedName))); + } + return factories; + } + + //TODO: add more default value factories if exists + throw new Error( + `This default value type currently is not supported. Plesase open an issue on github. 
Values: "${defaultValue}"`, + ); }, }; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 05aa31ac..e017bb9b 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -53,6 +53,11 @@ export function getDatasource(model: Model) { throw new Error('The url field must be a string literal or an env().'); } + if (url.startsWith('file:')) { + url = new URL(url, `file:${model.$document!.uri.path}`).pathname; + if (process.platform === 'win32' && url[0] === '/') url = url.slice(1); + } + const defaultSchemaField = datasource.fields.find((f) => f.name === 'defaultSchema'); const defaultSchema = (defaultSchemaField && getStringLiteral(defaultSchemaField.value)) || 'public'; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 97cc89dd..80b84b8e 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -159,7 +159,7 @@ function createProgram() { .addOption( new Option('--quote ', 'set the quote style of generated schema files').default('double'), ) - .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(2)) + .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(4)) .action((options) => dbAction('pull', options)); dbCommand diff --git a/packages/cli/src/test.ts b/packages/cli/src/test.ts new file mode 100644 index 00000000..b83716df --- /dev/null +++ b/packages/cli/src/test.ts @@ -0,0 +1,9 @@ +import { URI, Utils } from 'vscode-uri'; + +const base = URI.parse('file:/d/zenstack/'); +const relative = URI.parse('file:./c/asdasd.db'); +console.log(base); +console.log(relative); +console.log(Utils.resolvePath(base, relative.path)); +// console.log(URI.parse('file:/c/asdasd.db')); +// console.log(URI.parse('file:./c/asdasd.db')); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts new file mode 100644 index 00000000..45cdde44 --- /dev/null +++ b/packages/cli/test/db/pull.test.ts @@ -0,0 +1,96 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { describe, expect, it } from 'vitest'; +import { createProject, runCli } from '../utils'; + +const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); + +describe('DB pull', () => { + it('sqlite schema', () => { + const workDir = createProject(` +model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + name String? @default("Anonymous") + role Role @default(USER) + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) + @@map("users") +} + +model Profile { + id Int @id @default(autoincrement()) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? + + @@map("profiles") +} + +model Post { + id Int @id @default(autoincrement()) + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + title String + content String? 
+ published Boolean @default(false) + tags PostTag[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + slug String + score Float @default(0.0) + metadata Json? + + @@unique([authorId, slug]) + @@index([authorId, published]) + @@map("posts") +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") + @@map("tags") +} + +model PostTag { + post Post @relation(fields: [postId], references: [id], onDelete: Cascade) + postId Int + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) + tagId Int + assignedAt DateTime @default(now()) + note String? @default("initial") + + @@id([postId, tagId]) + @@map("post_tags") +} + +enum Role { + USER + ADMIN + MODERATOR +}`); + runCli('format', workDir); + runCli('db push', workDir); + + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); + }); +}); diff --git a/packages/cli/test/db/push.test.ts b/packages/cli/test/db/push.test.ts new file mode 100644 index 00000000..78164aae --- /dev/null +++ b/packages/cli/test/db/push.test.ts @@ -0,0 +1,18 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { describe, expect, it } from 'vitest'; +import { createProject, runCli } from '../utils'; + +const model = ` +model User { + id String @id @default(cuid()) +} +`; + +describe('CLI db commands test', () => { + it('should generate a database with db push', () => { + const workDir = createProject(model); + runCli('db push', workDir); + expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + }); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index abfe277a..51ba6c89 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -115,7 +115,7 @@ importers: version: 8.34.1(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^4.0.14 - version: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + version: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) yaml: specifier: ^2.8.0 version: 2.8.0 @@ -201,6 +201,9 @@ importers: ts-pattern: specifier: 'catalog:' version: 5.7.1 + vscode-uri: + specifier: ^3.1.0 + version: 3.1.0 devDependencies: '@types/better-sqlite3': specifier: 'catalog:' @@ -931,13 +934,13 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.15.6 + version: 8.16.0 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config bun-types: specifier: ^1.3.3 - version: 1.3.3 + version: 1.3.4 tests/runtimes/edge-runtime: dependencies: @@ -965,7 +968,7 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.15.6 + version: 8.16.0 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config @@ -1195,6 +1198,12 @@ packages: '@dxup/unimport@0.1.0': resolution: {integrity: sha512-6Q/Po8qGmlrShdG/R9+rpIhme9N/PGJumpvmwr1UAxGpt9DfOCt9kF8+yJkxhtPdJFL37KgUILZBRAkSU8cJZg==} + '@ecies/ciphers@0.2.5': + resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + peerDependencies: + '@noble/ciphers': ^1.0.0 + 
'@edge-runtime/primitives@6.0.0': resolution: {integrity: sha512-FqoxaBT+prPBHBwE1WXS1ocnu/VLTQyZ6NMUBAdbP7N2hsFTTxMC/jMu2D/8GAlMQfxeuppcPuCUk/HO3fpIvA==} engines: {node: '>=18'} @@ -1203,12 +1212,6 @@ packages: resolution: {integrity: sha512-NKBGBSIKUG584qrS1tyxVpX/AKJKQw5HgjYEnPLC0QsTw79JrGn+qUr8CXFb955Iy7GUdiiUv1rJ6JBGvaKb6w==} engines: {node: '>=18'} - '@ecies/ciphers@0.2.5': - resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} - engines: {bun: '>=1', deno: '>=2', node: '>=16'} - peerDependencies: - '@noble/ciphers': ^1.0.0 - '@emnapi/core@1.6.0': resolution: {integrity: sha512-zq/ay+9fNIJJtJiZxdTnXS20PllcYMX3OE23ESc4HK/bdYu3cOWYVhsOhVnXALfU/uqJIxn5NBPd9z4v+SfoSg==} @@ -3043,8 +3046,8 @@ packages: '@types/pg@8.11.11': resolution: {integrity: sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw==} - '@types/pg@8.15.6': - resolution: {integrity: sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==} + '@types/pg@8.16.0': + resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} '@types/pluralize@0.0.33': resolution: {integrity: sha512-JOqsl+ZoCpP4e8TDke9W79FDcSgPAR0l6pixx2JHkhnRjvShyYiAYw2LVsnA7K08Y6DeOnaU6ujmENO4os/cYg==} @@ -3336,11 +3339,11 @@ packages: vite: ^5.0.0 || ^6.0.0 || ^7.0.0 vue: ^3.2.25 - '@vitest/expect@4.0.14': - resolution: {integrity: sha512-RHk63V3zvRiYOWAV0rGEBRO820ce17hz7cI2kDmEdfQsBjT2luEKB5tCOc91u1oSQoUOZkSv3ZyzkdkSLD7lKw==} + '@vitest/expect@4.0.15': + resolution: {integrity: sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==} - '@vitest/mocker@4.0.14': - resolution: {integrity: sha512-RzS5NujlCzeRPF1MK7MXLiEFpkIXeMdQ+rN3Kk3tDI9j0mtbr7Nmuq67tpkOJQpgyClbOltCXMjLZicJHsH5Cg==} + '@vitest/mocker@4.0.15': + resolution: {integrity: sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==} peerDependencies: msw: ^2.4.9 vite: ^6.0.0 || ^7.0.0-0 @@ -3350,20 +3353,20 @@ packages: vite: optional: true - '@vitest/pretty-format@4.0.14': - resolution: {integrity: sha512-SOYPgujB6TITcJxgd3wmsLl+wZv+fy3av2PpiPpsWPZ6J1ySUYfScfpIt2Yv56ShJXR2MOA6q2KjKHN4EpdyRQ==} + '@vitest/pretty-format@4.0.15': + resolution: {integrity: sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==} - '@vitest/runner@4.0.14': - resolution: {integrity: sha512-BsAIk3FAqxICqREbX8SetIteT8PiaUL/tgJjmhxJhCsigmzzH8xeadtp7LRnTpCVzvf0ib9BgAfKJHuhNllKLw==} + '@vitest/runner@4.0.15': + resolution: {integrity: sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==} - '@vitest/snapshot@4.0.14': - resolution: {integrity: sha512-aQVBfT1PMzDSA16Y3Fp45a0q8nKexx6N5Amw3MX55BeTeZpoC08fGqEZqVmPcqN0ueZsuUQ9rriPMhZ3Mu19Ag==} + '@vitest/snapshot@4.0.15': + resolution: {integrity: sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==} - '@vitest/spy@4.0.14': - resolution: {integrity: sha512-JmAZT1UtZooO0tpY3GRyiC/8W7dCs05UOq9rfsUUgEZEdq+DuHLmWhPsrTt0TiW7WYeL/hXpaE07AZ2RCk44hg==} + '@vitest/spy@4.0.15': + resolution: {integrity: sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==} - '@vitest/utils@4.0.14': - resolution: {integrity: sha512-hLqXZKAWNg8pI+SQXyXxWCTOpA3MvsqcbVeNgSi8x/CSN2wi26dSzn1wrOhmCmFjEvN9p8/kLFRHa6PI8jHazw==} + '@vitest/utils@4.0.15': + resolution: 
{integrity: sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==} '@volar/language-core@2.4.23': resolution: {integrity: sha512-hEEd5ET/oSmBC6pi1j6NaNYRWoAiDhINbT8rmwtINugR39loROSlufGdYMF9TaKGfz+ViGs1Idi3mAhnuPcoGQ==} @@ -3752,8 +3755,8 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - bun-types@1.3.3: - resolution: {integrity: sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ==} + bun-types@1.3.4: + resolution: {integrity: sha512-5ua817+BZPZOlNaRgGBpZJOSAQ9RQ17pkwPD0yR7CfJg+r8DgIILByFifDTa+IPDDxzf5VNhtNlcKqFzDgJvlQ==} bundle-name@4.1.0: resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} @@ -4555,8 +4558,8 @@ packages: resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} engines: {node: '>=6'} - expect-type@1.2.2: - resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==} + expect-type@1.3.0: + resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} engines: {node: '>=12.0.0'} express@5.1.0: @@ -7010,6 +7013,10 @@ packages: tinyexec@1.0.1: resolution: {integrity: sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==} + tinyexec@1.0.2: + resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} + engines: {node: '>=18'} + tinyglobby@0.2.14: resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} engines: {node: '>=12.0.0'} @@ -7510,18 +7517,18 @@ packages: vite: optional: true - vitest@4.0.14: - resolution: {integrity: sha512-d9B2J9Cm9dN9+6nxMnnNJKJCtcyKfnHj15N6YNJfaFHRLua/d3sRKU9RuKmO9mB0XdFtUizlxfz/VPbd3OxGhw==} + vitest@4.0.15: + resolution: {integrity: sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==} engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@opentelemetry/api': ^1.9.0 '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 - '@vitest/browser-playwright': 4.0.14 - '@vitest/browser-preview': 4.0.14 - '@vitest/browser-webdriverio': 4.0.14 - '@vitest/ui': 4.0.14 + '@vitest/browser-playwright': 4.0.15 + '@vitest/browser-preview': 4.0.15 + '@vitest/browser-webdriverio': 4.0.15 + '@vitest/ui': 4.0.15 happy-dom: '*' jsdom: '*' peerDependenciesMeta: @@ -8077,16 +8084,16 @@ snapshots: '@dxup/unimport@0.1.0': {} + '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': + dependencies: + '@noble/ciphers': 1.3.0 + '@edge-runtime/primitives@6.0.0': {} '@edge-runtime/vm@5.0.0': dependencies: '@edge-runtime/primitives': 6.0.0 - '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': - dependencies: - '@noble/ciphers': 1.3.0 - '@emnapi/core@1.6.0': dependencies: '@emnapi/wasi-threads': 1.1.0 @@ -9804,7 +9811,7 @@ snapshots: pg-protocol: 1.10.3 pg-types: 4.0.2 - '@types/pg@8.15.6': + '@types/pg@8.16.0': dependencies: '@types/node': 20.19.24 pg-protocol: 1.10.3 @@ -9999,7 +10006,7 @@ snapshots: fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.3 + semver: 7.7.2 ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -10155,43 +10162,43 @@ snapshots: vite: 
7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.1) vue: 3.5.22(typescript@5.9.3) - '@vitest/expect@4.0.14': + '@vitest/expect@4.0.15': dependencies: '@standard-schema/spec': 1.0.0 '@types/chai': 5.2.2 - '@vitest/spy': 4.0.14 - '@vitest/utils': 4.0.14 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 chai: 6.2.1 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.14(vite@7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/mocker@4.0.15(vite@7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: - '@vitest/spy': 4.0.14 + '@vitest/spy': 4.0.15 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: vite: 7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) - '@vitest/pretty-format@4.0.14': + '@vitest/pretty-format@4.0.15': dependencies: tinyrainbow: 3.0.3 - '@vitest/runner@4.0.14': + '@vitest/runner@4.0.15': dependencies: - '@vitest/utils': 4.0.14 + '@vitest/utils': 4.0.15 pathe: 2.0.3 - '@vitest/snapshot@4.0.14': + '@vitest/snapshot@4.0.15': dependencies: - '@vitest/pretty-format': 4.0.14 + '@vitest/pretty-format': 4.0.15 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@4.0.14': {} + '@vitest/spy@4.0.15': {} - '@vitest/utils@4.0.14': + '@vitest/utils@4.0.15': dependencies: - '@vitest/pretty-format': 4.0.14 + '@vitest/pretty-format': 4.0.15 tinyrainbow: 3.0.3 '@volar/language-core@2.4.23': @@ -10668,7 +10675,7 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 - bun-types@1.3.3: + bun-types@1.3.4: dependencies: '@types/node': 20.19.24 @@ -11655,7 +11662,7 @@ snapshots: expand-template@2.0.3: {} - expect-type@1.2.2: {} + expect-type@1.3.0: {} express@5.1.0: dependencies: @@ -12478,7 +12485,7 @@ snapshots: kysely-bun-sqlite@0.4.0(kysely@0.28.8): dependencies: - bun-types: 1.3.3 + bun-types: 1.3.4 kysely: 0.28.8 kysely@0.28.8: {} @@ -14507,6 +14514,8 @@ snapshots: tinyexec@1.0.1: {} + tinyexec@1.0.2: {} + tinyglobby@0.2.14: dependencies: fdir: 6.4.6(picomatch@4.0.2) @@ -15026,24 +15035,24 @@ snapshots: optionalDependencies: vite: 7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.1) - vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): + vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): dependencies: - '@vitest/expect': 4.0.14 - '@vitest/mocker': 4.0.14(vite@7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) - '@vitest/pretty-format': 4.0.14 - '@vitest/runner': 4.0.14 - '@vitest/snapshot': 4.0.14 - '@vitest/spy': 4.0.14 - '@vitest/utils': 4.0.14 + '@vitest/expect': 4.0.15 + '@vitest/mocker': 4.0.15(vite@7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + '@vitest/pretty-format': 4.0.15 + '@vitest/runner': 4.0.15 + '@vitest/snapshot': 4.0.15 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 es-module-lexer: 1.7.0 - expect-type: 1.2.2 + expect-type: 1.3.0 magic-string: 0.30.21 obug: 2.1.1 pathe: 2.0.3 picomatch: 4.0.3 std-env: 3.10.0 tinybench: 2.9.0 - tinyexec: 0.3.2 + tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 vite: 
7.1.12(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) From 2f7e37928c82d9b3dc96730cf664d80e87bf5679 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 01:25:23 +0100 Subject: [PATCH 24/29] fix: lint fix --- packages/cli/src/actions/db.ts | 10 ++++++++-- packages/cli/src/actions/pull/index.ts | 5 ++--- packages/cli/src/actions/pull/provider/sqlite.ts | 3 +-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6ea1bc08..16fe6f19 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,10 +1,16 @@ import { config } from '@dotenvx/dotenvx'; import { ZModelCodeGenerator } from '@zenstackhq/language'; -import { type DataField, DataModel, Enum, type Model } from '@zenstackhq/language/ast'; +import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import fs from 'node:fs'; import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; -import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; +import { + generateTempPrismaSchema, + getSchemaFile, + handleSubProcessError, + requireDataSourceUrl, + loadSchemaDocument, +} from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 60c19204..4a661afb 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -2,11 +2,11 @@ import type { ZModelServices } from '@zenstackhq/language'; import { isEnum, type Attribute, + type BuiltinType, type DataField, type DataModel, type Enum, type Model, - type BuiltinType, } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory, @@ -15,7 +15,7 @@ import { EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import { type Cascade, type IntrospectedEnum, type IntrospectedTable, type IntrospectionProvider } from './provider'; +import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ @@ -156,7 +156,6 @@ export function syncTable({ services, options, defaultSchema, - oldModel, }: { table: IntrospectedTable; model: Model; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index e940b359..5825becd 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,7 +1,6 @@ -import type { BuiltinType } from '@zenstackhq/language/ast'; -import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. 
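The note that closes the sqlite.ts hunk above relies on a lazy dynamic import. As a rough sketch (not part of the patch; the function name and options are illustrative, and it assumes better-sqlite3 exposes its Database constructor as the module's default export), the pattern it refers to looks like this:

async function openSqliteDatabase(connectionString: string) {
    // Import inside the function so better-sqlite3 stays optional for
    // environments that never introspect a SQLite database.
    const { default: Database } = await import('better-sqlite3');
    // Introspection only reads metadata, so a read-only handle is enough here.
    return new Database(connectionString, { readonly: true });
}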
From f2d97705739496b087f18dc3fa6a6a6ffd8b0179 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 21:19:35 +0100 Subject: [PATCH 25/29] fix: formatting for e2e test schemas --- packages/cli/test/utils.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 2fafb207..011eb57e 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -5,15 +5,14 @@ import path from 'node:path'; const ZMODEL_PRELUDE = `datasource db { provider = "sqlite" - url = "file:./dev.db" -} -`; + url = "file:./dev.db" +}`; export function createProject(zmodel: string, addPrelude = true) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); return workDir; } From 454772ed005cbb43030e99304998136c89c70c96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 21:40:41 +0100 Subject: [PATCH 26/29] test: run db pull e2e test also for postgres --- packages/cli/test/db/pull.test.ts | 6 ++-- packages/cli/test/utils.ts | 48 +++++++++++++++++++++++++++---- 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 45cdde44..35378c2e 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -6,9 +6,9 @@ import { createProject, runCli } from '../utils'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); describe('DB pull', () => { - it('sqlite schema', () => { - const workDir = createProject(` -model User { + it('simple schema', () => { + const workDir = createProject( +`model User { id String @id @default(cuid()) email String @unique @map("email_address") name String? @default("Anonymous") diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 011eb57e..5a93100e 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -1,14 +1,52 @@ -import { createTestProject } from '@zenstackhq/testtools'; +import { createTestProject, getTestDbProvider } from '@zenstackhq/testtools'; +import { createHash } from 'node:crypto'; import { execSync } from 'node:child_process'; import fs from 'node:fs'; import path from 'node:path'; +import { expect } from 'vitest'; -const ZMODEL_PRELUDE = `datasource db { - provider = "sqlite" - url = "file:./dev.db" -}`; +const TEST_PG_CONFIG = { + host: process.env['TEST_PG_HOST'] ?? 'localhost', + port: process.env['TEST_PG_PORT'] ? parseInt(process.env['TEST_PG_PORT']) : 5432, + user: process.env['TEST_PG_USER'] ?? 'postgres', + password: process.env['TEST_PG_PASSWORD'] ?? 'postgres', +}; + +function getTestDbName(provider: string) { + if (provider === 'sqlite') { + return './test.db'; + } + const testName = expect.getState().currentTestName ?? 'unnamed'; + const testPath = expect.getState().testPath ?? 
'';
+    // digest test name
+    const digest = createHash('md5')
+        .update(testName + testPath)
+        .digest('hex');
+    // compute a database name based on test name
+    return (
+        'test_' +
+        testName
+            .toLowerCase()
+            .replace(/[^a-z0-9_]/g, '_')
+            .replace(/_+/g, '_')
+            .substring(0, 30) +
+        digest.slice(0, 6)
+    );
+}
 
 export function createProject(zmodel: string, addPrelude = true) {
+    const provider = getTestDbProvider() ?? 'sqlite';
+    const dbName = getTestDbName(provider);
+    const dbUrl =
+        provider === 'sqlite'
+            ? `file:${dbName}`
+            : `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`;
+
+    const ZMODEL_PRELUDE = `datasource db {
+    provider = "${provider}"
+    url = "${dbUrl}"
+}
+`;
     const workDir = createTestProject();
     fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true });
     const schemaPath = path.join(workDir, 'zenstack/schema.zmodel');

From 68fe375e40501d8944f9cd4144c361b2cb97a571 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?=
Date: Sun, 23 Nov 2025 01:30:26 +0100
Subject: [PATCH 27/29] fix: postgres introspection schema filter

---
 packages/cli/src/actions/db.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts
index 16fe6f19..87935313 100644
--- a/packages/cli/src/actions/db.ts
+++ b/packages/cli/src/actions/db.ts
@@ -101,10 +101,10 @@ async function runPull(options: PullOptions) {
     console.log('Starging introspect the database...');
     const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url);
     const enums = provider.isSupportedFeature('Schema')
-        ? allEnums.filter((e) => datasource.schemas.includes(e.schema_name))
+        ? allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name))
        : allEnums;
     const tables = provider.isSupportedFeature('Schema')
-        ? allTables.filter((t) => datasource.schemas.includes(t.schema))
+        ? 
allTables.filter((t) => datasource.allSchemas.includes(t.schema)) : allTables; const newModel: Model = { From 1dc45fa9fb74d67c3fb10a98375ea5fe0715d2d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sun, 23 Nov 2025 01:32:49 +0100 Subject: [PATCH 28/29] test: update cli tests --- packages/cli/test/check.test.ts | 7 +------ packages/cli/test/db.test.ts | 10 +++++----- packages/cli/test/db/push.test.ts | 4 ++-- packages/cli/test/migrate.test.ts | 24 ++++++++++++------------ packages/cli/test/utils.ts | 9 ++++++--- 5 files changed, 26 insertions(+), 28 deletions(-) diff --git a/packages/cli/test/check.test.ts b/packages/cli/test/check.test.ts index 287bb6b8..60f80903 100644 --- a/packages/cli/test/check.test.ts +++ b/packages/cli/test/check.test.ts @@ -83,17 +83,12 @@ describe('CLI validate command test', () => { it('should validate schema with syntax errors', () => { const modelWithSyntaxError = ` -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} - model User { id String @id @default(cuid()) email String @unique // Missing closing brace - syntax error `; - const workDir = createProject(modelWithSyntaxError, false); + const workDir = createProject(modelWithSyntaxError); // Should throw an error due to syntax error expect(() => runCli('check', workDir)).toThrow(); diff --git a/packages/cli/test/db.test.ts b/packages/cli/test/db.test.ts index 636dcff8..b17f92e5 100644 --- a/packages/cli/test/db.test.ts +++ b/packages/cli/test/db.test.ts @@ -11,13 +11,13 @@ model User { describe('CLI db commands test', () => { it('should generate a database with db push', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('should seed the database with db seed with seed script', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -36,7 +36,7 @@ fs.writeFileSync('seed.txt', 'success'); }); it('should seed the database after migrate reset', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -55,7 +55,7 @@ fs.writeFileSync('seed.txt', 'success'); }); it('should skip seeding the database without seed script', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db seed', workDir); }); }); diff --git a/packages/cli/test/db/push.test.ts b/packages/cli/test/db/push.test.ts index 78164aae..9c688df4 100644 --- a/packages/cli/test/db/push.test.ts +++ b/packages/cli/test/db/push.test.ts @@ -11,8 +11,8 @@ model User { describe('CLI db commands test', () => { it('should generate a database with db push', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); }); diff --git a/packages/cli/test/migrate.test.ts b/packages/cli/test/migrate.test.ts index 
56a0fec8..86abc357 100644 --- a/packages/cli/test/migrate.test.ts +++ b/packages/cli/test/migrate.test.ts @@ -11,36 +11,36 @@ model User { describe('CLI migrate commands test', () => { it('should generate a database with migrate dev', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); expect(fs.existsSync(path.join(workDir, 'zenstack/migrations'))).toBe(true); }); it('should reset the database with migrate reset', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); runCli('migrate reset --force', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('should reset the database with migrate deploy', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); - fs.rmSync(path.join(workDir, 'zenstack/dev.db')); + fs.rmSync(path.join(workDir, 'zenstack/test.db')); runCli('migrate deploy', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('supports migrate status', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); runCli('migrate status', workDir); }); it('supports migrate resolve', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); // find the migration record "timestamp_init" @@ -51,7 +51,7 @@ describe('CLI migrate commands test', () => { fs.writeFileSync(path.join(workDir, 'zenstack/migrations', migration!, 'migration.sql'), 'invalid content'); // redeploy the migration, which will fail - fs.rmSync(path.join(workDir, 'zenstack/dev.db'), { force: true }); + fs.rmSync(path.join(workDir, 'zenstack/test.db'), { force: true }); try { runCli('migrate deploy', workDir); } catch { @@ -66,7 +66,7 @@ describe('CLI migrate commands test', () => { }); it('should throw error when neither applied nor rolled-back is provided', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); expect(() => runCli('migrate resolve', workDir)).toThrow(); }); }); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 5a93100e..7017b622 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -34,8 +34,11 @@ function getTestDbName(provider: string) { ); } -export function createProject(zmodel: string, addPrelude = true) { - const provider = getTestDbProvider() ?? 'sqlite'; +export function createProject( + zmodel: string, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, +) { + const provider = (options?.provider || getTestDbProvider()) ?? 
'sqlite'; const dbName = getTestDbName(provider); const dbUrl = provider === 'sqlite' @@ -50,7 +53,7 @@ export function createProject(zmodel: string, addPrelude = true) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); return workDir; } From 68708b47e26478b0c9590782a1341911f801a98e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 15 Dec 2025 22:22:22 +0100 Subject: [PATCH 29/29] feat(cli): Improves database introspection and syncing Enhances the `db pull` command with a spinner for better UX. Adds color-coded logging to highlight important steps. Provides more detailed output on schema changes, including deleted models, enums, added fields, and deleted attributes. Also includes minor improvements to enum mapping and constraint handling. --- packages/cli/src/actions/db.ts | 76 +++++++++++++++++++++----- packages/cli/src/actions/pull/index.ts | 17 +++--- 2 files changed, 73 insertions(+), 20 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 87935313..36e354b9 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,15 +1,17 @@ import { config } from '@dotenvx/dotenvx'; import { ZModelCodeGenerator } from '@zenstackhq/language'; import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; +import colors from 'colors'; import fs from 'node:fs'; import path from 'node:path'; +import ora from 'ora'; import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, - requireDataSourceUrl, loadSchemaDocument, + requireDataSourceUrl, } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; @@ -77,6 +79,7 @@ async function runPush(options: PushOptions) { } async function runPull(options: PullOptions) { + const spinner = ora(); try { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); @@ -98,8 +101,11 @@ async function runPull(options: PullOptions) { if (!provider) { throw new Error(`No introspection provider found for: ${datasource.provider}`); } - console.log('Starging introspect the database...'); + + spinner.start('Introspecting database...'); const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); + spinner.succeed('Database introspected'); + const enums = provider.isSupportedFeature('Schema') ? allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name)) : allEnums; @@ -107,6 +113,8 @@ async function runPull(options: PullOptions) { ? 
allTables.filter((t) => datasource.allSchemas.includes(t.schema)) : allTables; + console.log(colors.blue('Syncing schema...')); + const newModel: Model = { $type: 'Model', $container: undefined, @@ -165,12 +173,22 @@ async function runPull(options: PullOptions) { }); } + console.log(colors.blue('Schema synced')); + const cwd = new URL(`file://${process.cwd()}`).pathname; const docs = services.shared.workspace.LangiumDocuments.all .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); + console.log(colors.bold('\nApplying changes to ZModel...')); + + const deletedModels: string[] = []; + const deletedEnums: string[] = []; + const addedFields: string[] = []; + const deletedAttributes: string[] = []; + const deletedFields: string[] = []; + //Delete models services.shared.workspace.IndexManager.allElements('DataModel', docsSet) .filter( @@ -181,7 +199,7 @@ async function runPull(options: PullOptions) { const model = decl.node!.$container as Model; const index = model.declarations.findIndex((d) => d === decl.node); model.declarations.splice(index, 1); - console.log(`Delete model ${decl.name}`); + deletedModels.push(colors.red(`- Model ${decl.name} deleted`)); }); // Delete Enums @@ -195,7 +213,7 @@ async function runPull(options: PullOptions) { const model = decl.node!.$container as Model; const index = model.declarations.findIndex((d) => d === decl.node); model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); + deletedEnums.push(colors.red(`- Enum ${decl.name} deleted`)); }); // newModel.declarations @@ -239,14 +257,16 @@ async function runPull(options: PullOptions) { if (originalFields.length > 1) { console.warn( - `Found more original fields, need to tweak the search algorith. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + colors.yellow( + `Found more original fields, need to tweak the search algorith. 
${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ), ); return; } const originalField = originalFields.at(0); Object.freeze(originalField); if (!originalField) { - console.log(`Added field ${f.name} to ${originalDataModel.name}`); + addedFields.push(colors.green(`+ Field ${f.name} added to ${originalDataModel.name}`)); (f as any).$container = originalDataModel; originalDataModel.fields.push(f as any); if (f.$type === 'DataField' && f.type.reference?.ref) { @@ -260,7 +280,7 @@ async function runPull(options: PullOptions) { } return; } - if (f.name === 'profiles') console.log(f.attributes.length); + originalField.attributes .filter( (attr) => @@ -271,7 +291,9 @@ async function runPull(options: PullOptions) { const field = attr.$container; const index = field.attributes.findIndex((d) => d === attr); field.attributes.splice(index, 1); - console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + deletedAttributes.push( + colors.yellow(`- Attribute ${attr.decl.$refText} deleted from field: ${field.name}`), + ); }); }); originalDataModel.fields @@ -295,10 +317,35 @@ async function runPull(options: PullOptions) { const _model = f.$container; const index = _model.fields.findIndex((d) => d === f); _model.fields.splice(index, 1); - console.log(`Delete field ${f.name}`); + deletedFields.push(colors.red(`- Field ${f.name} deleted from ${_model.name}`)); }); }); + if (deletedModels.length > 0) { + console.log(colors.bold('\nDeleted Models:')); + deletedModels.forEach((msg) => console.log(msg)); + } + + if (deletedEnums.length > 0) { + console.log(colors.bold('\nDeleted Enums:')); + deletedEnums.forEach((msg) => console.log(msg)); + } + + if (addedFields.length > 0) { + console.log(colors.bold('\nAdded Fields:')); + addedFields.forEach((msg) => console.log(msg)); + } + + if (deletedAttributes.length > 0) { + console.log(colors.bold('\nDeleted Attributes:')); + deletedAttributes.forEach((msg) => console.log(msg)); + } + + if (deletedFields.length > 0) { + console.log(colors.bold('\nDeleted Fields:')); + deletedFields.forEach((msg) => console.log(msg)); + } + if (options.out && !fs.lstatSync(options.out).isFile()) { throw new Error(`Output path ${options.out} is not a file`); } @@ -311,7 +358,7 @@ async function runPull(options: PullOptions) { if (options.out) { const zmodelSchema = generator.generate(newModel); - console.log(`Writing to ${options.out}`); + console.log(colors.blue(`Writing to ${options.out}`)); const outPath = options.out ? 
path.resolve(options.out) : schemaFile; @@ -319,12 +366,15 @@ async function runPull(options: PullOptions) { } else { docs.forEach(({ uri, parseResult: { value: model } }) => { const zmodelSchema = generator.generate(model); - console.log(`Writing to ${uri.path}`); + console.log(colors.blue(`Writing to ${uri.path}`)); fs.writeFileSync(uri.fsPath, zmodelSchema); }); } + + console.log(colors.green.bold('\nPull completed successfully!')); } catch (error) { - console.log(error); + spinner.fail('Pull failed'); + console.error(error); throw error; } -} +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4a661afb..68503829 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,4 +1,5 @@ import type { ZModelServices } from '@zenstackhq/language'; +import colors from 'colors'; import { isEnum, type Attribute, @@ -38,7 +39,7 @@ export function syncEnums({ if (provider.isSupportedFeature('NativeEnum')) { for (const dbEnum of dbEnums) { const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); - if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + if (modified) console.log(colors.gray(`Mapping enum ${dbEnum.enum_type} to ${name}`)); const factory = new EnumFactory().setName(name); if (modified || options.alwaysMap) factory.addAttribute((builder) => @@ -344,16 +345,18 @@ export function syncTable({ table.indexes.forEach((index) => { if (index.predicate) { //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. Read more: https://pris.ly/d/check-constraints - console.log( - 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', - `- Model: "${table.name}", constraint: "${index.name}"`, + console.warn( + colors.yellow( + `These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints\n- Model: "${table.name}", constraint: "${index.name}"`, + ), ); return; } if (index.columns.find((c) => c.expression)) { - console.log( - 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', - `- Model: "${table.name}", constraint: "${index.name}"`, + console.warn( + colors.yellow( + `These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints\n- Model: "${table.name}", constraint: "${index.name}"`, + ), ); return; }
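As a closing illustration of the UX change patch 29 describes (an ora spinner plus colors-based, color-coded logging around the introspection step), a minimal sketch follows. The withSpinner helper is hypothetical and not code from the patch; the patch itself drives ora() and colors inline inside runPull:

import colors from 'colors';
import ora from 'ora';

// Wrap a long-running async step in a spinner and report the outcome
// with color-coded output.
async function withSpinner<T>(label: string, work: () => Promise<T>): Promise<T> {
    const spinner = ora();
    spinner.start(`${label}...`);
    try {
        const result = await work();
        spinner.succeed(label);
        return result;
    } catch (error) {
        spinner.fail(colors.red(`${label} failed`));
        throw error;
    }
}

// Usage sketch, mirroring the introspection call in runPull:
// const schema = await withSpinner('Introspecting database', () => provider.introspect(datasource.url));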