diff --git a/changelogs/drizzle-orm/0.36.1.md b/changelogs/drizzle-orm/0.36.1.md index 1c0e96756..74d256a13 100644 --- a/changelogs/drizzle-orm/0.36.1.md +++ b/changelogs/drizzle-orm/0.36.1.md @@ -3,4 +3,4 @@ - [[BUG]: Using sql.placeholder with limit and/or offset for a prepared statement produces TS error](https://github.com/drizzle-team/drizzle-orm/issues/2146) - thanks @L-Mario564 - [[BUG] If a query I am trying to modify with a dynamic query (....$dynamic()) contains any placeholders, I'm getting an error that says No value for placeholder.... provided](https://github.com/drizzle-team/drizzle-orm/issues/2272) - thanks @L-Mario564 - [[BUG]: Error thrown when trying to insert an array of new rows using generatedAlwaysAsIdentity() for the id column](https://github.com/drizzle-team/drizzle-orm/issues/2849) - thanks @L-Mario564 -- [[BUG]: Unable to Use BigInt Types with Bun and Drizzle](https://github.com/drizzle-team/drizzle-orm/issues/2603) - thanks @L-Mario564 \ No newline at end of file +- [[BUG]: Unable to Use BigInt Types with Bun and Drizzle](https://github.com/drizzle-team/drizzle-orm/issues/2603) - thanks @L-Mario564 diff --git a/drizzle-kit/package.json b/drizzle-kit/package.json index 0b3b2562b..fbf1fd162 100644 --- a/drizzle-kit/package.json +++ b/drizzle-kit/package.json @@ -7,6 +7,7 @@ "orm", "pg", "mysql", + "singlestore", "postgresql", "postgres", "sqlite", diff --git a/drizzle-kit/src/api.ts b/drizzle-kit/src/api.ts index b18ed95f4..18107bd34 100644 --- a/drizzle-kit/src/api.ts +++ b/drizzle-kit/src/api.ts @@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'; import { LibSQLDatabase } from 'drizzle-orm/libsql'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { PgDatabase } from 'drizzle-orm/pg-core'; +import { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; import { columnsResolver, enumsResolver, @@ -30,12 +31,19 @@ import { generateMySqlSnapshot } from './serializer/mysqlSerializer'; import { prepareFromExports } from './serializer/pgImports'; import { PgSchema as PgSchemaKit, pgSchema, squashPgScheme } from './serializer/pgSchema'; import { generatePgSnapshot } from './serializer/pgSerializer'; +import { + SingleStoreSchema as SingleStoreSchemaKit, + singlestoreSchema, + squashSingleStoreScheme, +} from './serializer/singlestoreSchema'; +import { generateSingleStoreSnapshot } from './serializer/singlestoreSerializer'; import { SQLiteSchema as SQLiteSchemaKit, sqliteSchema, squashSqliteScheme } from './serializer/sqliteSchema'; import { generateSqliteSnapshot } from './serializer/sqliteSerializer'; import type { DB, SQLiteDB } from './utils'; export type DrizzleSnapshotJSON = PgSchemaKit; export type DrizzleSQLiteSnapshotJSON = SQLiteSchemaKit; export type DrizzleMySQLSnapshotJSON = MySQLSchemaKit; +export type DrizzleSingleStoreSnapshotJSON = SingleStoreSchemaKit; export const generateDrizzleJson = ( imports: Record, @@ -374,6 +382,112 @@ export const pushMySQLSchema = async ( }; }; +// SingleStore + +export const generateSingleStoreDrizzleJson = async ( + imports: Record, + prevId?: string, + casing?: CasingType, +): Promise => { + const { prepareFromExports } = await import('./serializer/singlestoreImports'); + + const prepared = prepareFromExports(imports); + + const id = randomUUID(); + + const snapshot = generateSingleStoreSnapshot(prepared.tables, /* prepared.views, */ casing); + + return { + ...snapshot, + id, + prevId: prevId ?? 
originUUID, + }; +}; + +export const generateSingleStoreMigration = async ( + prev: DrizzleSingleStoreSnapshotJSON, + cur: DrizzleSingleStoreSnapshotJSON, +) => { + const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { sqlStatements } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + /* singleStoreViewsResolver, */ + validatedPrev, + validatedCur, + 'push', + ); + + return sqlStatements; +}; + +export const pushSingleStoreSchema = async ( + imports: Record, + drizzleInstance: SingleStoreDriverDatabase, + databaseName: string, +) => { + const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); + const { logSuggestionsAndReturn } = await import( + './cli/commands/singlestorePushUtils' + ); + const { singlestorePushIntrospect } = await import( + './cli/commands/singlestoreIntrospect' + ); + const { sql } = await import('drizzle-orm'); + + const db: DB = { + query: async (query: string) => { + const res = await drizzleInstance.execute(sql.raw(query)); + return res[0] as unknown as any[]; + }, + }; + const cur = await generateSingleStoreDrizzleJson(imports); + const { schema: prev } = await singlestorePushIntrospect(db, databaseName, []); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { statements } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + /* singleStoreViewsResolver, */ + validatedPrev, + validatedCur, + 'push', + ); + + const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( + db, + statements, + validatedCur, + ); + + return { + hasDataLoss: shouldAskForApprove, + warnings: infoToPrint, + statementsToExecute, + apply: async () => { + for (const dStmnt of statementsToExecute) { + await db.query(dStmnt); + } + }, + }; +}; + export const upPgSnapshot = (snapshot: Record) => { if (snapshot.version === '5') { return upPgV7(upPgV6(snapshot)); diff --git a/drizzle-kit/src/cli/commands/introspect.ts b/drizzle-kit/src/cli/commands/introspect.ts index 2629d000e..101eb617a 100644 --- a/drizzle-kit/src/cli/commands/introspect.ts +++ b/drizzle-kit/src/cli/commands/introspect.ts @@ -4,20 +4,24 @@ import { render, renderWithTask } from 'hanji'; import { Minimatch } from 'minimatch'; import { join } from 'path'; import { plural, singular } from 'pluralize'; +import { drySingleStore, SingleStoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; import { assertUnreachable, originUUID } from '../../global'; import { schemaToTypeScript as mysqlSchemaToTypeScript } from '../../introspect-mysql'; import { paramNameFor, schemaToTypeScript as postgresSchemaToTypeScript } from '../../introspect-pg'; +import { schemaToTypeScript as singlestoreSchemaToTypeScript } from '../../introspect-singlestore'; import { schemaToTypeScript as sqliteSchemaToTypeScript } from '../../introspect-sqlite'; import { dryMySql, MySqlSchema, squashMysqlScheme } from '../../serializer/mysqlSchema'; import { fromDatabase as fromMysqlDatabase } from 
'../../serializer/mysqlSerializer'; import { dryPg, type PgSchema, squashPgScheme } from '../../serializer/pgSchema'; import { fromDatabase as fromPostgresDatabase } from '../../serializer/pgSerializer'; +import { fromDatabase as fromSingleStoreDatabase } from '../../serializer/singlestoreSerializer'; import { drySQLite, type SQLiteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema'; import { fromDatabase as fromSqliteDatabase } from '../../serializer/sqliteSerializer'; import { applyLibSQLSnapshotsDiff, applyMysqlSnapshotsDiff, applyPgSnapshotsDiff, + applySingleStoreSnapshotsDiff, applySqliteSnapshotsDiff, } from '../../snapshotsDiffer'; import { prepareOutFolder } from '../../utils'; @@ -26,6 +30,7 @@ import type { Casing, Prefix } from '../validations/common'; import { LibSQLCredentials } from '../validations/libsql'; import type { MysqlCredentials } from '../validations/mysql'; import type { PostgresCredentials } from '../validations/postgres'; +import { SingleStoreCredentials } from '../validations/singlestore'; import type { SqliteCredentials } from '../validations/sqlite'; import { IntrospectProgress } from '../views'; import { @@ -280,6 +285,103 @@ export const introspectMysql = async ( process.exit(0); }; +export const introspectSingleStore = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: SingleStoreCredentials, + tablesFilter: string[], + prefix: Prefix, +) => { + const { connectToSingleStore } = await import('../connections'); + const { db, database } = await connectToSingleStore(credentials); + + const matchers = tablesFilter.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new IntrospectProgress(); + const res = await renderWithTask( + progress, + fromSingleStoreDatabase(db, database, filter, (stage, count, status) => { + progress.update(stage, count, status); + }), + ); + + const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; + const ts = singlestoreSchemaToTypeScript(schema, casing); + const { internal, ...schemaWithoutInternals } = schema; + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + console.log(); + + const { snapshots, journal } = prepareOutFolder(out, 'postgresql'); + + if (snapshots.length === 0) { + const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( + squashSingleStoreScheme(drySingleStore), + squashSingleStoreScheme(schema), + tablesResolver, + columnsResolver, + /* singleStoreViewsResolver, */ + drySingleStore, + schema, + ); + + writeResult({ + cur: schema, + sqlStatements, + journal, + _meta, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + process.exit(0); +}; + export const introspectSqlite = async ( casing: Casing, out: string, diff --git a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts index 92770e99d..60b041fd9 
100644 --- a/drizzle-kit/src/cli/commands/migrate.ts +++ b/drizzle-kit/src/cli/commands/migrate.ts @@ -4,6 +4,8 @@ import { prepareMySqlMigrationSnapshot, preparePgDbPushSnapshot, preparePgMigrationSnapshot, + prepareSingleStoreDbPushSnapshot, + prepareSingleStoreMigrationSnapshot, prepareSQLiteDbPushSnapshot, prepareSqliteMigrationSnapshot, } from '../../migrationPreparator'; @@ -11,6 +13,7 @@ import { import chalk from 'chalk'; import { render } from 'hanji'; import path, { join } from 'path'; +import { SingleStoreSchema, singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; import { TypeOf } from 'zod'; import type { CommonSchema } from '../../schemaValidator'; import { MySqlSchema, mysqlSchema, squashMysqlScheme, ViewSquashed } from '../../serializer/mysqlSchema'; @@ -20,6 +23,7 @@ import { applyLibSQLSnapshotsDiff, applyMysqlSnapshotsDiff, applyPgSnapshotsDiff, + applySingleStoreSnapshotsDiff, applySqliteSnapshotsDiff, Column, ColumnsResolverInput, @@ -143,6 +147,28 @@ export const mySqlViewsResolver = async ( } }; +/* export const singleStoreViewsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'view', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; */ + export const sqliteViewsResolver = async ( input: ResolverInput, ): Promise> => { @@ -521,6 +547,156 @@ export const prepareAndMigrateMysql = async (config: GenerateConfig) => { } }; +// Not needed for now +function singleStoreSchemaSuggestions( + curSchema: TypeOf, + prevSchema: TypeOf, +) { + const suggestions: string[] = []; + const usedSuggestions: string[] = []; + const suggestionTypes = { + // TODO: Check if SingleStore has serial type + serial: withStyle.errorWarning( + `We deprecated the use of 'serial' for SingleStore starting from version 0.20.0. In SingleStore, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. This may make the process less explicit for both users and drizzle-kit push commands`, + ), + }; + + for (const table of Object.values(curSchema.tables)) { + for (const column of Object.values(table.columns)) { + if (column.type === 'serial') { + if (!usedSuggestions.includes('serial')) { + suggestions.push(suggestionTypes['serial']); + } + + const uniqueForSerial = Object.values( + prevSchema.tables[table.name].uniqueConstraints, + ).find((it) => it.columns[0] === column.name); + + suggestions.push( + `\n` + + withStyle.suggestion( + `We are suggesting to change ${ + chalk.blue( + column.name, + ) + } column in ${ + chalk.blueBright( + table.name, + ) + } table from serial to bigint unsigned\n\n${ + chalk.blueBright( + `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${ + uniqueForSerial?.name ? 
`"${uniqueForSerial?.name}"` : '' + })`, + ) + }`, + ), + ); + } + } + } + + return suggestions; +} + +// Intersect with prepareAnMigrate +export const prepareSingleStorePush = async ( + schemaPath: string | string[], + snapshot: SingleStoreSchema, + casing: CasingType | undefined, +) => { + try { + const { prev, cur } = await prepareSingleStoreDbPushSnapshot( + snapshot, + schemaPath, + casing, + ); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + /* singleStoreViewsResolver, */ + validatedPrev, + validatedCur, + 'push', + ); + + return { sqlStatements, statements, validatedCur, validatedPrev }; + } catch (e) { + console.error(e); + process.exit(1); + } +}; + +export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + const casing = config.casing; + + try { + // TODO: remove + assertV1OutFolder(outFolder); + + const { snapshots, journal } = prepareMigrationFolder(outFolder, 'singlestore'); + const { prev, cur, custom } = await prepareSingleStoreMigrationSnapshot( + snapshots, + schemaPath, + casing, + ); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + if (config.custom) { + writeResult({ + cur: custom, + sqlStatements: [], + journal, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + }); + return; + } + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + /* singleStoreViewsResolver, */ + validatedPrev, + validatedCur, + ); + + writeResult({ + cur, + sqlStatements, + journal, + _meta, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + }); + } catch (e) { + console.error(e); + } +}; + export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { const outFolder = config.out; const schemaPath = config.schema; diff --git a/drizzle-kit/src/cli/commands/push.ts b/drizzle-kit/src/cli/commands/push.ts index 4a41a46d4..0c82fe026 100644 --- a/drizzle-kit/src/cli/commands/push.ts +++ b/drizzle-kit/src/cli/commands/push.ts @@ -10,10 +10,18 @@ import { LibSQLCredentials } from '../validations/libsql'; import type { MysqlCredentials } from '../validations/mysql'; import { withStyle } from '../validations/outputs'; import type { PostgresCredentials } from '../validations/postgres'; +import { SingleStoreCredentials } from '../validations/singlestore'; import type { SqliteCredentials } from '../validations/sqlite'; import { libSqlLogSuggestionsAndReturn } from './libSqlPushUtils'; -import { filterStatements, logSuggestionsAndReturn } from './mysqlPushUtils'; +import { + filterStatements as mySqlFilterStatements, + logSuggestionsAndReturn as mySqlLogSuggestionsAndReturn, +} from './mysqlPushUtils'; import { pgSuggestions } from './pgPushUtils'; +import { + filterStatements as singleStoreFilterStatements, + logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn, +} from 
'./singlestorePushUtils'; import { logSuggestionsAndReturn as sqliteSuggestions } from './sqlitePushUtils'; export const mysqlPush = async ( @@ -35,7 +43,7 @@ export const mysqlPush = async ( const statements = await prepareMySQLPush(schemaPath, schema, casing); - const filteredStatements = filterStatements( + const filteredStatements = mySqlFilterStatements( statements.statements ?? [], statements.validatedCur, statements.validatedPrev, @@ -52,8 +60,7 @@ export const mysqlPush = async ( tablesToRemove, tablesToTruncate, infoToPrint, - schemasToRemove, - } = await logSuggestionsAndReturn( + } = await mySqlLogSuggestionsAndReturn( db, filteredStatements, statements.validatedCur, @@ -156,6 +163,150 @@ export const mysqlPush = async ( } }; +export const singlestorePush = async ( + schemaPath: string | string[], + credentials: SingleStoreCredentials, + tablesFilter: string[], + strict: boolean, + verbose: boolean, + force: boolean, + casing: CasingType | undefined, +) => { + const { connectToSingleStore } = await import('../connections'); + const { singlestorePushIntrospect } = await import('./singlestoreIntrospect'); + + const { db, database } = await connectToSingleStore(credentials); + + const { schema } = await singlestorePushIntrospect( + db, + database, + tablesFilter, + ); + const { prepareSingleStorePush } = await import('./migrate'); + + const statements = await prepareSingleStorePush(schemaPath, schema, casing); + + const filteredStatements = singleStoreFilterStatements( + statements.statements ?? [], + statements.validatedCur, + statements.validatedPrev, + ); + + try { + if (filteredStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + } else { + const { + shouldAskForApprove, + statementsToExecute, + columnsToRemove, + tablesToRemove, + tablesToTruncate, + infoToPrint, + schemasToRemove, + } = await singleStoreLogSuggestionsAndReturn( + db, + filteredStatements, + statements.validatedCur, + ); + + const filteredSqlStatements = fromJson(filteredStatements, 'singlestore'); + + const uniqueSqlStatementsToExecute: string[] = []; + statementsToExecute.forEach((ss) => { + if (!uniqueSqlStatementsToExecute.includes(ss)) { + uniqueSqlStatementsToExecute.push(ss); + } + }); + const uniqueFilteredSqlStatements: string[] = []; + filteredSqlStatements.forEach((ss) => { + if (!uniqueFilteredSqlStatements.includes(ss)) { + uniqueFilteredSqlStatements.push(ss); + } + }); + + if (verbose) { + console.log(); + console.log( + withStyle.warning('You are about to execute current statements:'), + ); + console.log(); + console.log( + [...uniqueSqlStatementsToExecute, ...uniqueFilteredSqlStatements] + .map((s) => chalk.blue(s)) + .join('\n'), + ); + console.log(); + } + + if (!force && strict) { + if (!shouldAskForApprove) { + const { status, data } = await render( + new Select(['No, abort', `Yes, I want to execute all statements`]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + } + + if (!force && shouldAskForApprove) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(infoToPrint.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { status, data } = await render( + new Select([ + 'No, abort', + `Yes, I want to${ + tablesToRemove.length > 0 + ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 
'tables' : 'table'},` + : ' ' + }${ + columnsToRemove.length > 0 + ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` + : ' ' + }${ + tablesToTruncate.length > 0 + ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` + : '' + }` + .replace(/(^,)|(,$)/g, '') + .replace(/ +(?= )/g, ''), + ]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + for (const dStmnt of uniqueSqlStatementsToExecute) { + await db.query(dStmnt); + } + + for (const statement of uniqueFilteredSqlStatements) { + await db.query(statement); + } + if (filteredStatements.length > 0) { + render(`[${chalk.green('✓')}] Changes applied`); + } else { + render(`[${chalk.blue('i')}] No changes detected`); + } + } + } catch (e) { + console.log(e); + } +}; + export const pgPush = async ( schemaPath: string | string[], verbose: boolean, @@ -171,7 +322,6 @@ export const pgPush = async ( const { pgPushIntrospect } = await import('./pgIntrospect'); const db = await preparePostgresDB(credentials); - const serialized = await serializePg(schemaPath, casing, schemasFilter); const { schema } = await pgPushIntrospect(db, tablesFilter, schemasFilter, entities, serialized); diff --git a/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts b/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts new file mode 100644 index 000000000..27d8c59c5 --- /dev/null +++ b/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts @@ -0,0 +1,53 @@ +import { renderWithTask } from 'hanji'; +import { Minimatch } from 'minimatch'; +import { originUUID } from '../../global'; +import type { SingleStoreSchema } from '../../serializer/singlestoreSchema'; +import { fromDatabase } from '../../serializer/singlestoreSerializer'; +import type { DB } from '../../utils'; +import { ProgressView } from '../views'; + +export const singlestorePushIntrospect = async ( + db: DB, + databaseName: string, + filters: string[], +) => { + const matchers = filters.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + const res = await renderWithTask( + progress, + fromDatabase(db, databaseName, filter), + ); + + const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; + const { internal, ...schemaWithoutInternals } = schema; + return { schema: schemaWithoutInternals }; +}; diff --git a/drizzle-kit/src/cli/commands/singlestorePushUtils.ts b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts new file mode 100644 index 000000000..80fad9b2d --- /dev/null +++ b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts @@ -0,0 +1,352 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { TypeOf } from 'zod'; +import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; +import { singlestoreSchema, SingleStoreSquasher } from '../../serializer/singlestoreSchema'; +import type { DB } from '../../utils'; +import { Select } from '../selector-ui'; +import { withStyle } from '../validations/outputs'; + 
+export const filterStatements = ( + statements: JsonStatement[], + currentSchema: TypeOf, + prevSchema: TypeOf, +) => { + return statements.filter((statement) => { + if (statement.type === 'alter_table_alter_column_set_type') { + // Don't need to handle it on migrations step and introspection + // but for both it should be skipped + if ( + statement.oldDataType.startsWith('tinyint') + && statement.newDataType.startsWith('boolean') + ) { + return false; + } + + if ( + statement.oldDataType.startsWith('bigint unsigned') + && statement.newDataType.startsWith('serial') + ) { + return false; + } + + if ( + statement.oldDataType.startsWith('serial') + && statement.newDataType.startsWith('bigint unsigned') + ) { + return false; + } + } else if (statement.type === 'alter_table_alter_column_set_default') { + if ( + statement.newDefaultValue === false + && statement.oldDefaultValue === 0 + && statement.newDataType === 'boolean' + ) { + return false; + } + if ( + statement.newDefaultValue === true + && statement.oldDefaultValue === 1 + && statement.newDataType === 'boolean' + ) { + return false; + } + } else if (statement.type === 'delete_unique_constraint') { + const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); + // only if constraint was removed from a serial column, than treat it as removed + // const serialStatement = statements.find( + // (it) => it.type === "alter_table_alter_column_set_type" + // ) as JsonAlterColumnTypeStatement; + // if ( + // serialStatement?.oldDataType.startsWith("bigint unsigned") && + // serialStatement?.newDataType.startsWith("serial") && + // serialStatement.columnName === + // SingleStoreSquasher.unsquashUnique(statement.data).columns[0] + // ) { + // return false; + // } + // Check if uniqueindex was only on this column, that is serial + + // if now serial and was not serial and was unique index + if ( + unsquashed.columns.length === 1 + && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .type === 'serial' + && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .type === 'serial' + && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .name === unsquashed.columns[0] + ) { + return false; + } + } else if (statement.type === 'alter_table_alter_column_drop_notnull') { + // only if constraint was removed from a serial column, than treat it as removed + const serialStatement = statements.find( + (it) => it.type === 'alter_table_alter_column_set_type', + ) as JsonAlterColumnTypeStatement; + if ( + serialStatement?.oldDataType.startsWith('bigint unsigned') + && serialStatement?.newDataType.startsWith('serial') + && serialStatement.columnName === statement.columnName + && serialStatement.tableName === statement.tableName + ) { + return false; + } + if (statement.newDataType === 'serial' && !statement.columnNotNull) { + return false; + } + if (statement.columnAutoIncrement) { + return false; + } + } + + return true; + }); +}; + +export const logSuggestionsAndReturn = async ( + db: DB, + statements: JsonStatement[], + json2: TypeOf, +) => { + let shouldAskForApprove = false; + const statementsToExecute: string[] = []; + const infoToPrint: string[] = []; + + const tablesToRemove: string[] = []; + const columnsToRemove: string[] = []; + const schemasToRemove: string[] = []; + const tablesToTruncate: string[] = []; + + for (const statement of statements) { + if (statement.type === 'drop_table') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, 
+ ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.tableName, + ) + } table with ${count} items`, + ); + tablesToRemove.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_drop_column') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.columnName, + ) + } column in ${statement.tableName} table with ${count} items`, + ); + columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); + shouldAskForApprove = true; + } + } else if (statement.type === 'drop_schema') { + const res = await db.query( + `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.name, + ) + } schema with ${count} tables`, + ); + schemasToRemove.push(statement.name); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_set_type') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to change ${ + chalk.underline( + statement.columnName, + ) + } column type from ${ + chalk.underline( + statement.oldDataType, + ) + } to ${chalk.underline(statement.newDataType)} with ${count} items`, + ); + statementsToExecute.push(`truncate table ${statement.tableName};`); + tablesToTruncate.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_drop_default') { + if (statement.columnNotNull) { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to remove default value from ${ + chalk.underline( + statement.columnName, + ) + } not-null column with ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + // shouldAskForApprove = true; + } else if (statement.type === 'alter_table_alter_column_set_notnull') { + if (typeof statement.columnDefault === 'undefined') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to set not-null constraint to ${ + chalk.underline( + statement.columnName, + ) + } column without default, which contains ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + } else if (statement.type === 'alter_table_alter_column_drop_pk') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + // if drop pk and json2 has autoincrement in table -> exit process with error + if ( + Object.values(json2.tables[statement.tableName].columns).filter( + (column) => column.autoincrement, + ).length > 0 + ) { + console.log( + `${ + withStyle.errorWarning( + `You have removed the primary key from a 
${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`,
+          )
+        }`,
+      );
+      process.exit(1);
+    }
+
+    const count = Number(res[0].count);
+    if (count > 0) {
+      infoToPrint.push(
+        `· You're about to change ${
+          chalk.underline(
+            statement.tableName,
+          )
+        } primary key. This statement may fail and your table may be left without a primary key`,
+      );
+
+      tablesToTruncate.push(statement.tableName);
+      shouldAskForApprove = true;
+    }
+  } else if (statement.type === 'delete_composite_pk') {
+    // if drop pk and json2 has autoincrement in table -> exit process with error
+    if (
+      Object.values(json2.tables[statement.tableName].columns).filter(
+        (column) => column.autoincrement,
+      ).length > 0
+    ) {
+      console.log(
+        `${
+          withStyle.errorWarning(
+            `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`,
+          )
+        }`,
+      );
+      process.exit(1);
+    }
+  } else if (statement.type === 'alter_table_add_column') {
+    if (
+      statement.column.notNull
+      && typeof statement.column.default === 'undefined'
+    ) {
+      const res = await db.query(
+        `select count(*) as count from \`${statement.tableName}\``,
+      );
+      const count = Number(res[0].count);
+      if (count > 0) {
+        infoToPrint.push(
+          `· You're about to add not-null ${
+            chalk.underline(
+              statement.column.name,
+            )
+          } column without default value, which contains ${count} items`,
+        );
+
+        tablesToTruncate.push(statement.tableName);
+        statementsToExecute.push(`truncate table ${statement.tableName};`);
+
+        shouldAskForApprove = true;
+      }
+    }
+  } else if (statement.type === 'create_unique_constraint') {
+    const res = await db.query(
+      `select count(*) as count from \`${statement.tableName}\``,
+    );
+    const count = Number(res[0].count);
+    if (count > 0) {
+      const unsquashedUnique = SingleStoreSquasher.unsquashUnique(statement.data);
+      console.log(
+        `· You're about to add ${
+          chalk.underline(
+            unsquashedUnique.name,
+          )
+        } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database.
Do you want to truncate ${ + chalk.underline( + statement.tableName, + ) + } table?\n`, + ); + const { status, data } = await render( + new Select([ + 'No, add the constraint without truncating the table', + `Yes, truncate the table`, + ]), + ); + if (data?.index === 1) { + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + shouldAskForApprove = true; + } + } + } + } + + return { + statementsToExecute, + shouldAskForApprove, + infoToPrint, + columnsToRemove: [...new Set(columnsToRemove)], + schemasToRemove: [...new Set(schemasToRemove)], + tablesToTruncate: [...new Set(tablesToTruncate)], + tablesToRemove: [...new Set(tablesToRemove)], + }; +}; diff --git a/drizzle-kit/src/cli/commands/singlestoreUp.ts b/drizzle-kit/src/cli/commands/singlestoreUp.ts new file mode 100644 index 000000000..dc5004ed0 --- /dev/null +++ b/drizzle-kit/src/cli/commands/singlestoreUp.ts @@ -0,0 +1 @@ +export const upSinglestoreHandler = (out: string) => {}; diff --git a/drizzle-kit/src/cli/commands/utils.ts b/drizzle-kit/src/cli/commands/utils.ts index 7386b74d5..88476c56e 100644 --- a/drizzle-kit/src/cli/commands/utils.ts +++ b/drizzle-kit/src/cli/commands/utils.ts @@ -31,6 +31,11 @@ import { postgresCredentials, printConfigConnectionIssues as printIssuesPg, } from '../validations/postgres'; +import { + printConfigConnectionIssues as printIssuesSingleStore, + SingleStoreCredentials, + singlestoreCredentials, +} from '../validations/singlestore'; import { printConfigConnectionIssues as printIssuesSqlite, SqliteCredentials, @@ -222,6 +227,10 @@ export const preparePushConfig = async ( dialect: 'turso'; credentials: LibSQLCredentials; } + | { + dialect: 'singlestore'; + credentials: SingleStoreCredentials; + } ) & { schemaPath: string | string[]; verbose: boolean; @@ -316,6 +325,25 @@ export const preparePushConfig = async ( }; } + if (config.dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(config); + if (!parsed.success) { + printIssuesSingleStore(config); + process.exit(1); + } + + return { + dialect: 'singlestore', + schemaPath: config.schema, + strict: config.strict ?? false, + verbose: config.verbose ?? false, + force: (options.force as boolean) ?? 
false, + credentials: parsed.data, + tablesFilter, + schemasFilter, + }; + } + if (config.dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(config); if (!parsed.success) { @@ -378,6 +406,10 @@ export const preparePullConfig = async ( dialect: 'turso'; credentials: LibSQLCredentials; } + | { + dialect: 'singlestore'; + credentials: SingleStoreCredentials; + } ) & { out: string; breakpoints: boolean; @@ -468,6 +500,26 @@ export const preparePullConfig = async ( }; } + if (dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(config); + if (!parsed.success) { + printIssuesSingleStore(config); + process.exit(1); + } + + return { + dialect: 'singlestore', + out: config.out, + breakpoints: config.breakpoints, + casing: config.casing, + credentials: parsed.data, + tablesFilter, + schemasFilter, + prefix: config.migrations?.prefix || 'index', + entities: config.entities, + }; + } + if (dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(config); if (!parsed.success) { @@ -559,6 +611,23 @@ export const prepareStudioConfig = async (options: Record) => { credentials, }; } + + if (dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesSingleStore(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + schema, + host, + port, + credentials, + }; + } + if (dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(flattened); if (!parsed.success) { @@ -644,6 +713,23 @@ export const prepareMigrateConfig = async (configPath: string | undefined) => { table, }; } + + if (dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesSingleStore(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + out, + credentials, + schema, + table, + }; + } + if (dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(flattened); if (!parsed.success) { diff --git a/drizzle-kit/src/cli/connections.ts b/drizzle-kit/src/cli/connections.ts index aab1d0ef7..f2cf4817c 100644 --- a/drizzle-kit/src/cli/connections.ts +++ b/drizzle-kit/src/cli/connections.ts @@ -19,6 +19,7 @@ import { LibSQLCredentials } from './validations/libsql'; import type { MysqlCredentials } from './validations/mysql'; import { withStyle } from './validations/outputs'; import type { PostgresCredentials } from './validations/postgres'; +import { SingleStoreCredentials } from './validations/singlestore'; import type { SqliteCredentials } from './validations/sqlite'; export const preparePostgresDB = async ( @@ -415,6 +416,85 @@ export const preparePostgresDB = async ( process.exit(1); }; +const parseSingleStoreCredentials = (credentials: SingleStoreCredentials) => { + if ('url' in credentials) { + const url = credentials.url; + + const connectionUrl = new URL(url); + const pathname = connectionUrl.pathname; + + const database = pathname.split('/')[pathname.split('/').length - 1]; + if (!database) { + console.error( + 'You should specify a database name in connection string (singlestore://USER:PASSWORD@HOST:PORT/DATABASE)', + ); + process.exit(1); + } + return { database, url }; + } else { + return { + database: credentials.database, + credentials, + }; + } +}; + +export const connectToSingleStore = async ( + it: SingleStoreCredentials, +): Promise<{ + db: DB; + proxy: Proxy; + database: string; + migrate: (config: MigrationConfig) => Promise; +}> => 
{ + const result = parseSingleStoreCredentials(it); + + if (await checkPackage('mysql2')) { + const { createConnection } = await import('mysql2/promise'); + const { drizzle } = await import('drizzle-orm/singlestore'); + const { migrate } = await import('drizzle-orm/singlestore/migrator'); + + const connection = result.url + ? await createConnection(result.url) + : await createConnection(result.credentials!); // needed for some reason! + + const db = drizzle(connection); + const migrateFn = async (config: MigrationConfig) => { + return migrate(db, config); + }; + + await connection.connect(); + const query: DB['query'] = async ( + sql: string, + params?: any[], + ): Promise => { + const res = await connection.execute(sql, params); + return res[0] as any; + }; + + const proxy: Proxy = async (params: ProxyParams) => { + const result = await connection.query({ + sql: params.sql, + values: params.params, + rowsAsArray: params.mode === 'array', + }); + return result[0] as any[]; + }; + + return { + db: { query }, + proxy, + database: result.database, + migrate: migrateFn, + }; + } + + console.error( + "To connect to SingleStore database - please install 'singlestore' driver", + ); + process.exit(1); +}; + const parseMysqlCredentials = (credentials: MysqlCredentials) => { if ('url' in credentials) { const url = credentials.url; diff --git a/drizzle-kit/src/cli/schema.ts b/drizzle-kit/src/cli/schema.ts index b03acde95..12153ee74 100644 --- a/drizzle-kit/src/cli/schema.ts +++ b/drizzle-kit/src/cli/schema.ts @@ -1,11 +1,19 @@ +import { boolean, command, number, string } from '@drizzle-team/brocli'; import chalk from 'chalk'; -import { checkHandler } from './commands/check'; -import { assertOrmCoreVersion, assertPackages, assertStudioNodeVersion, ormVersionGt } from './utils'; +import 'dotenv/config'; +import { mkdirSync } from 'fs'; +import { renderWithTask } from 'hanji'; +import { dialects } from 'src/schemaValidator'; import '../@types/utils'; +import { assertUnreachable } from '../global'; +import { type Setup } from '../serializer/studio'; import { assertV1OutFolder } from '../utils'; +import { certs } from '../utils/certs'; +import { checkHandler } from './commands/check'; import { dropMigration } from './commands/drop'; import { upMysqlHandler } from './commands/mysqlUp'; import { upPgHandler } from './commands/pgUp'; +import { upSinglestoreHandler } from './commands/singlestoreUp'; import { upSqliteHandler } from './commands/sqliteUp'; import { prepareCheckParams, @@ -16,21 +24,16 @@ import { preparePushConfig, prepareStudioConfig, } from './commands/utils'; +import { assertOrmCoreVersion, assertPackages, assertStudioNodeVersion, ormVersionGt } from './utils'; import { assertCollisions, drivers, prefixes } from './validations/common'; import { withStyle } from './validations/outputs'; -import 'dotenv/config'; -import { boolean, command, number, string } from '@drizzle-team/brocli'; -import { mkdirSync } from 'fs'; -import { renderWithTask } from 'hanji'; -import { dialects } from 'src/schemaValidator'; -import { assertUnreachable } from '../global'; -import type { Setup } from '../serializer/studio'; -import { certs } from '../utils/certs'; import { grey, MigrateProgress } from './views'; const optionDialect = string('dialect') .enum(...dialects) - .desc(`Database dialect: 'postgresql', 'mysql', 'sqlite' or 'turso'`); + .desc( + `Database dialect: 'postgresql', 'mysql', 'sqlite', 'turso' or 'singlestore'`, + ); const optionOut = string().desc("Output folder, 'drizzle' by default"); const 
optionConfig = string().desc('Path to drizzle config file');
 const optionBreakpoints = boolean().desc(
@@ -81,6 +84,7 @@ export const generate = command({
     prepareAndMigrateMysql,
     prepareAndMigrateSqlite,
     prepareAndMigrateLibSQL,
+    prepareAndMigrateSingleStore,
   } = await import('./commands/migrate');

   const dialect = opts.dialect;
@@ -92,6 +96,8 @@
       await prepareAndMigrateSqlite(opts);
     } else if (dialect === 'turso') {
       await prepareAndMigrateLibSQL(opts);
+    } else if (dialect === 'singlestore') {
+      await prepareAndMigrateSingleStore(opts);
     } else {
       assertUnreachable(dialect);
     }
@@ -154,6 +160,17 @@ export const migrate = command({
           migrationsSchema: schema,
         }),
       );
+    } else if (dialect === 'singlestore') {
+      const { connectToSingleStore } = await import('./connections');
+      const { migrate } = await connectToSingleStore(credentials);
+      await renderWithTask(
+        new MigrateProgress(),
+        migrate({
+          migrationsFolder: out,
+          migrationsTable: table,
+          migrationsSchema: schema,
+        }),
+      );
     } else if (dialect === 'sqlite') {
       const { connectToSQLite } = await import('./connections');
       const { migrate } = await connectToSQLite(credentials);
@@ -340,6 +357,17 @@ export const push = command({
         force,
         casing,
       );
+    } else if (dialect === 'singlestore') {
+      const { singlestorePush } = await import('./commands/push');
+      await singlestorePush(
+        schemaPath,
+        credentials,
+        tablesFilter,
+        strict,
+        verbose,
+        force,
+        casing,
+      );
     } else {
       assertUnreachable(dialect);
     }
@@ -398,6 +426,10 @@ export const up = command({
     if (dialect === 'sqlite' || dialect === 'turso') {
       upSqliteHandler(out);
     }
+
+    if (dialect === 'singlestore') {
+      upSinglestoreHandler(out);
+    }
   },
 });
@@ -531,6 +563,16 @@ export const pull = command({
         tablesFilter,
         prefix,
       );
+    } else if (dialect === 'singlestore') {
+      const { introspectSingleStore } = await import('./commands/introspect');
+      await introspectSingleStore(
+        casing,
+        out,
+        breakpoints,
+        credentials,
+        tablesFilter,
+        prefix,
+      );
     } else {
       assertUnreachable(dialect);
     }
@@ -591,6 +633,8 @@ export const studio = command({
       drizzleForMySQL,
       prepareSQLiteSchema,
       drizzleForSQLite,
+      prepareSingleStoreSchema,
+      drizzleForSingleStore,
       drizzleForLibSQL,
     } = await import('../serializer/studio');
@@ -637,6 +681,16 @@ export const studio = command({
         ? await prepareSQLiteSchema(schemaPath)
         : { schema: {}, relations: {}, files: [] };
       setup = await drizzleForLibSQL(credentials, schema, relations, files);
+    } else if (dialect === 'singlestore') {
+      const { schema, relations, files } = schemaPath
+        ? await prepareSingleStoreSchema(schemaPath)
+        : { schema: {}, relations: {}, files: [] };
+      setup = await drizzleForSingleStore(
+        credentials,
+        schema,
+        relations,
+        files,
+      );
     } else {
       assertUnreachable(dialect);
     }
diff --git a/drizzle-kit/src/cli/validations/outputs.ts b/drizzle-kit/src/cli/validations/outputs.ts
index 3ef499651..6e9d520dd 100644
--- a/drizzle-kit/src/cli/validations/outputs.ts
+++ b/drizzle-kit/src/cli/validations/outputs.ts
@@ -26,7 +26,7 @@ export const outputs = {
       ),
     noDialect: () =>
       withStyle.error(
-        `Please specify 'dialect' param in config, either of 'postgresql', 'mysql', 'sqlite' or 'turso'`,
+        `Please specify 'dialect' param in config, either of 'postgresql', 'mysql', 'sqlite', 'turso' or 'singlestore'`,
       ),
   },
   common: {
@@ -79,4 +79,13 @@ export const outputs = {
     introspect: {},
     push: {},
   },
+  singlestore: {
+    connection: {
+      driver: () => withStyle.error(`Only "mysql2" is an available option for "--driver"`),
+      required: () =>
+        withStyle.error(
+          `Either "url" or "host" and "database" are required for database connection`,
+        ),
+    },
+  },
 };
diff --git a/drizzle-kit/src/cli/validations/singlestore.ts b/drizzle-kit/src/cli/validations/singlestore.ts
new file mode 100644
index 000000000..ebe0cc5f0
--- /dev/null
+++ b/drizzle-kit/src/cli/validations/singlestore.ts
@@ -0,0 +1,61 @@
+import { boolean, coerce, object, string, TypeOf, union } from 'zod';
+import { error } from '../views';
+import { wrapParam } from './common';
+import { outputs } from './outputs';
+
+export const singlestoreCredentials = union([
+  object({
+    host: string().min(1),
+    port: coerce.number().min(1).optional(),
+    user: string().min(1).optional(),
+    password: string().min(1).optional(),
+    database: string().min(1),
+    ssl: union([
+      string(),
+      object({
+        pfx: string().optional(),
+        key: string().optional(),
+        passphrase: string().optional(),
+        cert: string().optional(),
+        ca: union([string(), string().array()]).optional(),
+        crl: union([string(), string().array()]).optional(),
+        ciphers: string().optional(),
+        rejectUnauthorized: boolean().optional(),
+      }),
+    ]).optional(),
+  }),
+  object({
+    url: string().min(1),
+  }),
+]);
+
+export type SingleStoreCredentials = TypeOf<typeof singlestoreCredentials>;
+
+export const printCliConnectionIssues = (options: any) => {
+  const { uri, host, database } = options || {};
+
+  if (!uri && (!host || !database)) {
+    console.log(outputs.singlestore.connection.required());
+  }
+};
+
+export const printConfigConnectionIssues = (
+  options: Record<string, unknown>,
+) => {
+  if ('url' in options) {
+    let text = `Please provide required params for SingleStore driver:\n`;
+    console.log(error(text));
+    console.log(wrapParam('url', options.url, false, 'url'));
+    process.exit(1);
+  }
+
+  let text = `Please provide required params for SingleStore driver:\n`;
+  console.log(error(text));
+  console.log(wrapParam('host', options.host));
+  console.log(wrapParam('port', options.port, true));
+  console.log(wrapParam('user', options.user, true));
+  console.log(wrapParam('password', options.password, true, 'secret'));
+  console.log(wrapParam('database', options.database));
+  console.log(wrapParam('ssl', options.ssl, true));
+  process.exit(1);
+};
diff --git a/drizzle-kit/src/cli/views.ts b/drizzle-kit/src/cli/views.ts
index 3ec04a588..9106d31cd 100644
--- a/drizzle-kit/src/cli/views.ts
+++ b/drizzle-kit/src/cli/views.ts
@@ -32,7 +32,13 @@ export const schema = (schema: CommonSchema): string => {
     .map((t) => {
       const columnsCount = Object.values(t.columns).length;
       const indexesCount = Object.values(t.indexes).length;
-      const foreignKeys = Object.values(t.foreignKeys).length;
+      let foreignKeys: number = 0;
+      // Singlestore doesn't have foreign keys
+      if (schema.dialect !== 'singlestore') {
+        // @ts-expect-error
+        foreignKeys = Object.values(t.foreignKeys).length;
+      }
+
       return `${chalk.bold.blue(t.name)} ${
         chalk.gray(
           `${columnsCount} columns ${indexesCount} indexes ${foreignKeys} fks`,
diff --git a/drizzle-kit/src/index.ts b/drizzle-kit/src/index.ts
index 4a57e59e3..b59581dd0 100644
--- a/drizzle-kit/src/index.ts
+++ b/drizzle-kit/src/index.ts
@@ -23,7 +23,7 @@ type Verify<T, U extends T> = U;
 * **Config** usage:
 *
 * `dialect` - mandatory and is responsible for explicitly providing a databse dialect you are using for all the commands
- * *Possible values*: `postgresql`, `mysql`, `sqlite`
+ * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore`
 *
 * See https://orm.drizzle.team/kit-docs/config-reference#dialect
 *
@@ -64,7 +64,7 @@ type Verify<T, U extends T> = U;
 *
 * `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations.
 * It’s optional and true by default, it’s necessary to properly apply migrations on databases,
- * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite) and
+ * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite, SingleStore) and
 * Drizzle ORM has to apply them sequentially one by one.
 *
 * See https://orm.drizzle.team/kit-docs/config-reference#breakpoints
@@ -210,6 +210,21 @@ export type Config =
       driver: Verify;
     }
     | {}
+    | {
+      dialect: Verify<Dialect, 'singlestore'>;
+      dbCredentials:
+        | {
+          host: string;
+          port?: number;
+          user?: string;
+          password?: string;
+          database: string;
+          ssl?: string | SslOptions;
+        }
+        | {
+          url: string;
+        };
+    }
 );
 /**
 *
@@ -219,7 +234,7 @@ export type Config =
 * **Config** usage:
 *
 * `dialect` - mandatory and is responsible for explicitly providing a databse dialect you are using for all the commands
- * *Possible values*: `postgresql`, `mysql`, `sqlite`
+ * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore`
 *
 * See https://orm.drizzle.team/kit-docs/config-reference#dialect
 *
@@ -260,7 +275,7 @@ export type Config =
 *
 * `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations.
 * It’s optional and true by default, it’s necessary to properly apply migrations on databases,
- * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite) and
+ * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite, SingleStore) and
 * Drizzle ORM has to apply them sequentially one by one.
* * See https://orm.drizzle.team/kit-docs/config-reference#breakpoints diff --git a/drizzle-kit/src/introspect-singlestore.ts b/drizzle-kit/src/introspect-singlestore.ts new file mode 100644 index 000000000..8f93cdfda --- /dev/null +++ b/drizzle-kit/src/introspect-singlestore.ts @@ -0,0 +1,918 @@ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +import { toCamelCase } from 'drizzle-orm/casing'; +import './@types/utils'; +import type { Casing } from './cli/validations/common'; +import { assertUnreachable } from './global'; +import { + Column, + Index, + PrimaryKey, + SingleStoreSchema, + SingleStoreSchemaInternal, + UniqueConstraint, +} from './serializer/singlestoreSchema'; +import { indexName } from './serializer/singlestoreSerializer'; + +// time precision to fsp +// {mode: "string"} for timestamp by default + +const singlestoreImportsList = new Set([ + 'singlestoreTable', + 'singlestoreEnum', + 'bigint', + 'binary', + 'boolean', + 'char', + 'date', + 'datetime', + 'decimal', + 'double', + 'float', + 'int', + 'json', + // TODO: add new type BSON + // TODO: add new type Blob + // TODO: add new type UUID + // TODO: add new type GUID + // TODO: add new type Vector + // TODO: add new type GeoPoint + 'mediumint', + 'real', + 'serial', + 'smallint', + 'text', + 'tinytext', + 'mediumtext', + 'longtext', + 'time', + 'timestamp', + 'tinyint', + 'varbinary', + 'varchar', + 'year', + 'enum', +]); + +const objToStatement = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', '); + statement += ' }'; + return statement; +}; + +const objToStatement2 = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const timeConfig = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); + statement += ' }'; + return statement; +}; + +const binaryConfig = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); + statement += ' }'; + return statement; +}; + +const importsPatch = { + 'double precision': 'doublePrecision', + 'timestamp without time zone': 'timestamp', +} as Record; + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const prepareCasing = (casing?: Casing) => (value: string) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(value.camelCase()); + } + + assertUnreachable(casing); +}; + +const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { + if (casing === 'preserve') { + return ''; + } + if (casing === 'camel') { + return toCamelCase(name) === name ? '' : withMode ? 
`"${name}", ` : `"${name}"`; + } + + assertUnreachable(casing); +}; + +export const schemaToTypeScript = ( + schema: SingleStoreSchemaInternal, + casing: Casing, +) => { + const withCasing = prepareCasing(casing); + + const imports = Object.values(schema.tables).reduce( + (res, it) => { + const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); + const pkImports = Object.values(it.compositePrimaryKeys).map( + (it) => 'primaryKey', + ); + const uniqueImports = Object.values(it.uniqueConstraints).map( + (it) => 'unique', + ); + + res.singlestore.push(...idxImports); + res.singlestore.push(...pkImports); + res.singlestore.push(...uniqueImports); + + const columnImports = Object.values(it.columns) + .map((col) => { + let patched = importsPatch[col.type] ?? col.type; + patched = patched.startsWith('varchar(') ? 'varchar' : patched; + patched = patched.startsWith('char(') ? 'char' : patched; + patched = patched.startsWith('binary(') ? 'binary' : patched; + patched = patched.startsWith('decimal(') ? 'decimal' : patched; + patched = patched.startsWith('smallint(') ? 'smallint' : patched; + patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched; + patched = patched.startsWith('datetime(') ? 'datetime' : patched; + patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; + patched = patched.startsWith('int(') ? 'int' : patched; + patched = patched.startsWith('double(') ? 'double' : patched; + patched = patched.startsWith('float(') ? 'float' : patched; + patched = patched.startsWith('int unsigned') ? 'int' : patched; + patched = patched.startsWith('tinyint(') ? 'tinyint' : patched; + patched = patched.startsWith('mediumint(') ? 'mediumint' : patched; + patched = patched.startsWith('bigint(') ? 'bigint' : patched; + patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; + patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; + patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; + patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; + return patched; + }) + .filter((type) => { + return singlestoreImportsList.has(type); + }); + + res.singlestore.push(...columnImports); + return res; + }, + { singlestore: [] as string[] }, + ); + + /* Object.values(schema.views).forEach((it) => { + imports.singlestore.push('singlestoreView'); + + const columnImports = Object.values(it.columns) + .map((col) => { + let patched = importsPatch[col.type] ?? col.type; + patched = patched.startsWith('varchar(') ? 'varchar' : patched; + patched = patched.startsWith('char(') ? 'char' : patched; + patched = patched.startsWith('binary(') ? 'binary' : patched; + patched = patched.startsWith('decimal(') ? 'decimal' : patched; + patched = patched.startsWith('smallint(') ? 'smallint' : patched; + patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched; + patched = patched.startsWith('datetime(') ? 'datetime' : patched; + patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; + patched = patched.startsWith('int(') ? 'int' : patched; + patched = patched.startsWith('double(') ? 'double' : patched; + patched = patched.startsWith('float(') ? 'float' : patched; + patched = patched.startsWith('int unsigned') ? 'int' : patched; + patched = patched.startsWith('tinyint(') ? 'tinyint' : patched; + patched = patched.startsWith('mediumint(') ? 'mediumint' : patched; + patched = patched.startsWith('bigint(') ? 'bigint' : patched; + patched = patched.startsWith('tinyint unsigned') ? 
'tinyint' : patched; + patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; + patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; + patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; + return patched; + }) + .filter((type) => { + return singlestoreImportsList.has(type); + }); + + imports.singlestore.push(...columnImports); + }); */ + + const tableStatements = Object.values(schema.tables).map((table) => { + const func = 'singlestoreTable'; + let statement = ''; + if (imports.singlestore.includes(withCasing(table.name))) { + statement = `// Table name is in conflict with ${ + withCasing( + table.name, + ) + } import.\n// Please change to any other name, that is not in imports list\n`; + } + statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`; + statement += createTableColumns( + Object.values(table.columns), + withCasing, + casing, + table.name, + schema, + ); + statement += '}'; + + if ( + Object.keys(table.indexes).length > 0 + || Object.keys(table.compositePrimaryKeys).length > 0 + || Object.keys(table.uniqueConstraints).length > 0 + ) { + statement += ',\n'; + statement += '(table) => {\n'; + statement += '\treturn {\n'; + statement += createTableIndexes( + table.name, + Object.values(table.indexes), + withCasing, + ); + statement += createTablePKs( + Object.values(table.compositePrimaryKeys), + withCasing, + ); + statement += createTableUniques( + Object.values(table.uniqueConstraints), + withCasing, + ); + statement += '\t}\n'; + statement += '}'; + } + + statement += ');'; + return statement; + }); + + /* const viewsStatements = Object.values(schema.views).map((view) => { + const { columns, name, algorithm, definition, sqlSecurity, withCheckOption } = view; + const func = 'singlestoreView'; + let statement = ''; + + if (imports.singlestore.includes(withCasing(name))) { + statement = `// Table name is in conflict with ${ + withCasing( + view.name, + ) + } import.\n// Please change to any other name, that is not in imports list\n`; + } + statement += `export const ${withCasing(name)} = ${func}("${name}", {\n`; + statement += createTableColumns( + Object.values(columns), + withCasing, + casing, + name, + schema, + ); + statement += '})'; + + statement += algorithm ? `.algorithm("${algorithm}")` : ''; + statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : ''; + statement += withCheckOption ? 
`.withCheckOption("${withCheckOption}")` : ''; + statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`; + + return statement; + }); */ + + const uniqueSingleStoreImports = [ + 'singlestoreTable', + 'singlestoreSchema', + 'AnySingleStoreColumn', + ...new Set(imports.singlestore), + ]; + const importsTs = `import { ${ + uniqueSingleStoreImports.join( + ', ', + ) + } } from "drizzle-orm/singlestore-core"\nimport { sql } from "drizzle-orm"\n\n`; + + let decalrations = ''; + decalrations += tableStatements.join('\n\n'); + decalrations += '\n'; + /* decalrations += viewsStatements.join('\n\n'); */ + + const file = importsTs + decalrations; + + const schemaEntry = ` + { + ${ + Object.values(schema.tables) + .map((it) => withCasing(it.name)) + .join(',') + } + } + `; + + return { + file, // backward compatible, print to file + imports: importsTs, + decalrations, + schemaEntry, + }; +}; + +const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { + if (isExpression) { + return `sql\`${defaultValue}\``; + } + + return defaultValue; +}; + +const mapColumnDefaultForJson = (defaultValue: any) => { + if ( + typeof defaultValue === 'string' + && defaultValue.startsWith("('") + && defaultValue.endsWith("')") + ) { + return defaultValue.substring(2, defaultValue.length - 2); + } + + return defaultValue; +}; + +const column = ( + type: string, + name: string, + casing: (value: string) => string, + rawCasing: Casing, + defaultValue?: any, + autoincrement?: boolean, + onUpdate?: boolean, + isExpression?: boolean, +) => { + let lowered = type; + if (!type.startsWith('enum(')) { + lowered = type.toLowerCase(); + } + + if (lowered === 'serial') { + return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`; + } + + if (lowered.startsWith('int')) { + const isUnsigned = lowered.includes('unsigned'); + const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); + let out = `${casing(name)}: int(${columnName}${ + isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : '' + })`; + out += autoincrement ? `.autoincrement()` : ''; + out += typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('tinyint')) { + const isUnsigned = lowered.includes('unsigned'); + const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); + // let out = `${name.camelCase()}: tinyint("${name}")`; + let out: string = `${casing(name)}: tinyint(${columnName}${ + isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : '' + })`; + out += autoincrement ? `.autoincrement()` : ''; + out += typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('smallint')) { + const isUnsigned = lowered.includes('unsigned'); + const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); + let out = `${casing(name)}: smallint(${columnName}${ + isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : '' + })`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? 
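+		/* Example for a hypothetical `age` column of type `smallint unsigned`: the generated
+		   line is `age: smallint({ unsigned: true })`; the raw column name is inserted before
+		   the options object only when it differs from the property key (see dbColumnName). */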
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('mediumint')) { + const isUnsigned = lowered.includes('unsigned'); + const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); + let out = `${casing(name)}: mediumint(${columnName}${ + isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : '' + })`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('bigint')) { + const isUnsigned = lowered.includes('unsigned'); + let out = `${casing(name)}: bigint(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: "number"${ + isUnsigned ? ', unsigned: true' : '' + } })`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'boolean') { + let out = `${casing(name)}: boolean(${dbColumnName({ name, casing: rawCasing })})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('double')) { + let params: + | { precision?: string; scale?: string; unsigned?: boolean } + | undefined; + + if (lowered.length > (lowered.includes('unsigned') ? 15 : 6)) { + const [precision, scale] = lowered + .slice(7, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) + .split(','); + params = { precision, scale }; + } + + if (lowered.includes('unsigned')) { + params = { ...(params ?? {}), unsigned: true }; + } + + const timeConfigParams = params ? timeConfig(params) : undefined; + + let out = params + ? `${casing(name)}: double(${ + dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) + }${timeConfig(params)})` + : `${casing(name)}: double(${dbColumnName({ name, casing: rawCasing })})`; + + // let out = `${name.camelCase()}: double("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('float')) { + let params: + | { precision?: string; scale?: string; unsigned?: boolean } + | undefined; + + if (lowered.length > (lowered.includes('unsigned') ? 14 : 5)) { + const [precision, scale] = lowered + .slice(6, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) + .split(','); + params = { precision, scale }; + } + + if (lowered.includes('unsigned')) { + params = { ...(params ?? {}), unsigned: true }; + } + + let out = `${casing(name)}: float(${dbColumnName({ name, casing: rawCasing })}${params ? timeConfig(params) : ''})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'real') { + let out = `${casing(name)}: real(${dbColumnName({ name, casing: rawCasing })})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('timestamp')) { + const keyLength = 'timestamp'.length + 1; + let fsp = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + fsp = fsp ? fsp : null; + + const params = timeConfig({ fsp, mode: "'string'" }); + + let out = params + ? 
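+		/* Example for a hypothetical `created_at` column: `timestamp(3)` introspects to
+		   `createdAt: timestamp("created_at", { fsp: 3, mode: 'string' })` under camel casing,
+		   while a plain `timestamp` keeps only `{ mode: 'string' }`, matching the note at the
+		   top of this file. */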
`${casing(name)}: timestamp(${ + dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) + }${params})` + : `${casing(name)}: timestamp(${dbColumnName({ name, casing: rawCasing })})`; + + // singlestore has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case + defaultValue = defaultValue === 'now()' || defaultValue === 'CURRENT_TIMESTAMP' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + + let onUpdateNow = onUpdate ? '.onUpdateNow()' : ''; + out += onUpdateNow; + + return out; + } + + if (lowered.startsWith('time')) { + const keyLength = 'time'.length + 1; + let fsp = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + fsp = fsp ? fsp : null; + + const params = timeConfig({ fsp }); + + let out = params + ? `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` + : `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing })})`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered === 'date') { + let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${ + casing( + name, + ) + }: date(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string' })`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + // in singlestore text can't have default value. Will leave it in case smth ;) + if (lowered === 'text') { + let out = `${casing(name)}: text(${dbColumnName({ name, casing: rawCasing })})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in singlestore text can't have default value. Will leave it in case smth ;) + if (lowered === 'tinytext') { + let out = `${casing(name)}: tinytext(${dbColumnName({ name, casing: rawCasing })})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in singlestore text can't have default value. Will leave it in case smth ;) + if (lowered === 'mediumtext') { + let out = `${casing(name)}: mediumtext(${dbColumnName({ name, casing: rawCasing })})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in singlestore text can't have default value. Will leave it in case smth ;) + if (lowered === 'longtext') { + let out = `${casing(name)}: longtext(${dbColumnName({ name, casing: rawCasing })})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'year') { + let out = `${casing(name)}: year(${dbColumnName({ name, casing: rawCasing })})`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in singlestore json can't have default value. Will leave it in case smth ;) + if (lowered === 'json') { + let out = `${casing(name)}: json(${dbColumnName({ name, casing: rawCasing })})`; + + out += defaultValue + ? 
`.default(${mapColumnDefaultForJson(defaultValue)})` + : ''; + + return out; + } + + if (lowered.startsWith('varchar')) { + let out: string = `${ + casing( + name, + ) + }: varchar(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ + lowered.substring( + 'varchar'.length + 1, + lowered.length - 1, + ) + } })`; + + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('char')) { + let out: string = `${ + casing( + name, + ) + }: char(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ + lowered.substring( + 'char'.length + 1, + lowered.length - 1, + ) + } })`; + + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('datetime')) { + let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`; + + const fsp = lowered.startsWith('datetime(') + ? lowered.substring('datetime'.length + 1, lowered.length - 1) + : undefined; + + out = fsp + ? `${ + casing( + name, + ) + }: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string', fsp: ${ + lowered.substring( + 'datetime'.length + 1, + lowered.length - 1, + ) + } })` + : `${casing(name)}: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string'})`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('decimal')) { + let params: + | { precision?: string; scale?: string; unsigned?: boolean } + | undefined; + + if (lowered.length > (lowered.includes('unsigned') ? 16 : 7)) { + const [precision, scale] = lowered + .slice(8, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) + .split(','); + params = { precision, scale }; + } + + if (lowered.includes('unsigned')) { + params = { ...(params ?? {}), unsigned: true }; + } + + const timeConfigParams = params ? timeConfig(params) : undefined; + + let out = params + ? `${casing(name)}: decimal(${ + dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) + }${timeConfigParams})` + : `${casing(name)}: decimal(${dbColumnName({ name, casing: rawCasing })})`; + + defaultValue = typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('binary')) { + const keyLength = 'binary'.length + 1; + let length = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + length = length ? length : null; + + const params = binaryConfig({ length }); + + let out = params + ? `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` + : `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing })})`; + + defaultValue = defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('enum')) { + const values = lowered.substring('enum'.length + 1, lowered.length - 1); + let out = `${casing(name)}: singlestoreEnum(${ + dbColumnName({ name, casing: rawCasing, withMode: true }) + }[${values}])`; + out += defaultValue + ? 
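+		/* Example for a hypothetical `status` column: `enum('active','inactive')` introspects
+		   to `status: singlestoreEnum(['active','inactive'])`; the values keep the quoting
+		   reported by the database type. */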
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('varbinary')) { + const keyLength = 'varbinary'.length + 1; + let length = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + length = length ? length : null; + + const params = binaryConfig({ length }); + + let out = params + ? `${casing(name)}: varbinary(${ + dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) + }${params})` + : `${casing(name)}: varbinary(${dbColumnName({ name, casing: rawCasing })})`; + + defaultValue = defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + console.log('uknown', type); + return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; +}; + +const createTableColumns = ( + columns: Column[], + casing: (val: string) => string, + rawCasing: Casing, + tableName: string, + schema: SingleStoreSchemaInternal, +): string => { + let statement = ''; + + columns.forEach((it) => { + statement += '\t'; + statement += column( + it.type, + it.name, + casing, + rawCasing, + it.default, + it.autoincrement, + it.onUpdate, + schema.internal?.tables![tableName]?.columns[it.name] + ?.isDefaultAnExpression ?? false, + ); + statement += it.primaryKey ? '.primaryKey()' : ''; + statement += it.notNull ? '.notNull()' : ''; + + statement += it.generated + ? `.generatedAlwaysAs(sql\`${ + it.generated.as.replace( + /`/g, + '\\`', + ) + }\`, { mode: "${it.generated.type}" })` + : ''; + + statement += ',\n'; + }); + + return statement; +}; + +const createTableIndexes = ( + tableName: string, + idxs: Index[], + casing: (value: string) => string, +): string => { + let statement = ''; + + idxs.forEach((it) => { + let idxKey = it.name.startsWith(tableName) && it.name !== tableName + ? it.name.slice(tableName.length + 1) + : it.name; + idxKey = idxKey.endsWith('_index') + ? idxKey.slice(0, -'_index'.length) + '_idx' + : idxKey; + + idxKey = casing(idxKey); + + const indexGeneratedName = indexName(tableName, it.columns); + const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += `\t\t${idxKey}: `; + statement += it.isUnique ? 'uniqueIndex(' : 'index('; + statement += `${escapedIndexName})`; + statement += `.on(${ + it.columns + .map((it) => `table.${casing(it)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: (value: string) => string, +): string => { + let statement = ''; + + unqs.forEach((it) => { + const idxKey = casing(it.name); + + statement += `\t\t${idxKey}: `; + statement += 'unique('; + statement += `"${it.name}")`; + statement += `.on(${ + it.columns + .map((it) => `table.${casing(it)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTablePKs = ( + pks: PrimaryKey[], + casing: (value: string) => string, +): string => { + let statement = ''; + + pks.forEach((it) => { + let idxKey = casing(it.name); + + statement += `\t\t${idxKey}: `; + statement += 'primaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${casing(c)}`; + }) + .join(', ') + }]${it.name ? 
`, name: "${it.name}"` : ''}}`; + statement += '),'; + statement += `\n`; + }); + + return statement; +}; diff --git a/drizzle-kit/src/jsonStatements.ts b/drizzle-kit/src/jsonStatements.ts index 18b28fac4..f64020f5a 100644 --- a/drizzle-kit/src/jsonStatements.ts +++ b/drizzle-kit/src/jsonStatements.ts @@ -13,6 +13,7 @@ import { View as PgView, ViewWithOption, } from './serializer/pgSchema'; +import { SingleStoreKitInternals, SingleStoreSchema, SingleStoreSquasher } from './serializer/singlestoreSchema'; import { SQLiteKitInternals, SQLiteSchemaInternal, @@ -50,7 +51,7 @@ export interface JsonCreateTableStatement { uniqueConstraints?: string[]; policies?: string[]; checkConstraints?: string[]; - internals?: MySqlKitInternals; + internals?: MySqlKitInternals | SingleStoreKitInternals; isRLSEnabled?: boolean; } @@ -306,7 +307,7 @@ export interface JsonCreateIndexStatement { tableName: string; data: string; schema: string; - internal?: MySqlKitInternals | SQLiteKitInternals; + internal?: MySqlKitInternals | SQLiteKitInternals | SingleStoreKitInternals; } export interface JsonPgCreateIndexStatement { @@ -673,6 +674,11 @@ export type JsonCreateMySqlViewStatement = { replace: boolean; } & Omit; +/* export type JsonCreateSingleStoreViewStatement = { + type: 'singlestore_create_view'; + replace: boolean; +} & Omit; */ + export type JsonCreateSqliteViewStatement = { type: 'sqlite_create_view'; } & Omit; @@ -756,6 +762,10 @@ export type JsonAlterMySqlViewStatement = { type: 'alter_mysql_view'; } & Omit; +/* export type JsonAlterSingleStoreViewStatement = { + type: 'alter_singlestore_view'; +} & Omit; */ + export type JsonAlterViewStatement = | JsonAlterViewAlterSchemaStatement | JsonAlterViewAddWithOptionStatement @@ -838,6 +848,8 @@ export type JsonStatement = | JsonAlterViewStatement | JsonCreateMySqlViewStatement | JsonAlterMySqlViewStatement + /* | JsonCreateSingleStoreViewStatement + | JsonAlterSingleStoreViewStatement */ | JsonCreateSqliteViewStatement | JsonCreateCheckConstraint | JsonDeleteCheckConstraint @@ -906,6 +918,34 @@ export const prepareMySqlCreateTableJson = ( }; }; +export const prepareSingleStoreCreateTableJson = ( + table: Table, + // TODO: remove? + json2: SingleStoreSchema, + // we need it to know if some of the indexes(and in future other parts) are expressions or columns + // didn't change singlestoreserialaizer, because it will break snapshots and diffs and it's hard to detect + // if previously it was an expression or column + internals: SingleStoreKitInternals, +): JsonCreateTableStatement => { + const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; + + return { + type: 'create_table', + tableName: name, + schema, + columns: Object.values(columns), + compositePKs: Object.values(compositePrimaryKeys), + compositePkName: Object.values(compositePrimaryKeys).length > 0 + ? 
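+			/* `compositePrimaryKeys` holds squashed `name;col1,col2` strings produced by
+			   SingleStoreSquasher.squashPK, e.g. a hypothetical "users_pk;id,org_id" unsquashes
+			   to { name: 'users_pk', columns: ['id', 'org_id'] }; the unsquashed name is then
+			   looked up in json2 to resolve compositePkName. */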
json2.tables[name].compositePrimaryKeys[ + SingleStoreSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) + .name + ].name + : '', + uniqueConstraints: Object.values(uniqueConstraints), + internals, + }; +}; + export const prepareSQLiteCreateTable = ( table: Table, action?: 'push' | undefined, @@ -1207,7 +1247,7 @@ export const prepareDeleteSchemasJson = ( export const prepareRenameColumns = ( tableName: string, - // TODO: split for pg and mysql+sqlite without schema + // TODO: split for pg and mysql+sqlite and singlestore without schema schema: string, pairs: { from: Column; to: Column }[], ): JsonRenameColumnStatement[] => { @@ -1637,6 +1677,363 @@ export const prepareAlterColumnsMysql = ( return [...dropPkStatements, ...setPkStatements, ...statements]; }; +export const prepareAlterColumnsSingleStore = ( + tableName: string, + schema: string, + columns: AlteredColumn[], + // TODO: remove? + json1: CommonSquashedSchema, + json2: CommonSquashedSchema, + action?: 'push' | undefined, +): JsonAlterColumnStatement[] => { + let statements: JsonAlterColumnStatement[] = []; + let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; + let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; + + for (const column of columns) { + const columnName = typeof column.name !== 'string' ? column.name.new : column.name; + + const table = json2.tables[tableName]; + const snapshotColumn = table.columns[columnName]; + + const columnType = snapshotColumn.type; + const columnDefault = snapshotColumn.default; + const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined; + const columnNotNull = table.columns[columnName].notNull; + + const columnAutoIncrement = 'autoincrement' in snapshotColumn + ? snapshotColumn.autoincrement ?? false + : false; + + const columnPk = table.columns[columnName].primaryKey; + + if (column.autoincrement?.type === 'added') { + statements.push({ + type: 'alter_table_alter_column_set_autoincrement', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + + if (column.autoincrement?.type === 'changed') { + const type = column.autoincrement.new + ? 'alter_table_alter_column_set_autoincrement' + : 'alter_table_alter_column_drop_autoincrement'; + + statements.push({ + type, + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + + if (column.autoincrement?.type === 'deleted') { + statements.push({ + type: 'alter_table_alter_column_drop_autoincrement', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + } + + for (const column of columns) { + const columnName = typeof column.name !== 'string' ? 
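+		/* `column.name` is either a plain string or, when the diff detected a rename, an
+		   { old, new } pair; the new name is used for all lookups below and a separate
+		   rename statement is emitted further down. */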
column.name.new : column.name; + + // I used any, because those fields are available only for mysql and singlestore dialect + // For other dialects it will become undefined, that is fine for json statements + const columnType = json2.tables[tableName].columns[columnName].type; + const columnDefault = json2.tables[tableName].columns[columnName].default; + const columnGenerated = json2.tables[tableName].columns[columnName].generated; + const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any) + .onUpdate; + const columnNotNull = json2.tables[tableName].columns[columnName].notNull; + const columnAutoIncrement = ( + json2.tables[tableName].columns[columnName] as any + ).autoincrement; + const columnPk = (json2.tables[tableName].columns[columnName] as any) + .primaryKey; + + const compositePk = json2.tables[tableName].compositePrimaryKeys[ + `${tableName}_${columnName}` + ]; + + if (typeof column.name !== 'string') { + statements.push({ + type: 'alter_table_rename_column', + tableName, + oldColumnName: column.name.old, + newColumnName: column.name.new, + schema, + }); + } + + if (column.type?.type === 'changed') { + statements.push({ + type: 'alter_table_alter_column_set_type', + tableName, + columnName, + newDataType: column.type.new, + oldDataType: column.type.old, + schema, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + columnGenerated, + }); + } + + if ( + column.primaryKey?.type === 'deleted' + || (column.primaryKey?.type === 'changed' + && !column.primaryKey.new + && typeof compositePk === 'undefined') + ) { + dropPkStatements.push({ + //// + type: 'alter_table_alter_column_drop_pk', + tableName, + columnName, + schema, + }); + } + + if (column.default?.type === 'added') { + statements.push({ + type: 'alter_table_alter_column_set_default', + tableName, + columnName, + newDefaultValue: column.default.value, + schema, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + newDataType: columnType, + columnPk, + }); + } + + if (column.default?.type === 'changed') { + statements.push({ + type: 'alter_table_alter_column_set_default', + tableName, + columnName, + newDefaultValue: column.default.new, + oldDefaultValue: column.default.old, + schema, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + newDataType: columnType, + columnPk, + }); + } + + if (column.default?.type === 'deleted') { + statements.push({ + type: 'alter_table_alter_column_drop_default', + tableName, + columnName, + schema, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + newDataType: columnType, + columnPk, + }); + } + + if (column.notNull?.type === 'added') { + statements.push({ + type: 'alter_table_alter_column_set_notnull', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + + if (column.notNull?.type === 'changed') { + const type = column.notNull.new + ? 
'alter_table_alter_column_set_notnull' + : 'alter_table_alter_column_drop_notnull'; + statements.push({ + type: type, + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + + if (column.notNull?.type === 'deleted') { + statements.push({ + type: 'alter_table_alter_column_drop_notnull', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + + if (column.generated?.type === 'added') { + if (columnGenerated?.type === 'virtual') { + // TODO: Change warning message according to SingleStore docs + warning( + `You are trying to add virtual generated constraint to ${ + chalk.blue( + columnName, + ) + } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`, + ); + } + statements.push({ + type: 'alter_table_alter_column_set_generated', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + columnGenerated, + }); + } + + if (column.generated?.type === 'changed' && action !== 'push') { + statements.push({ + type: 'alter_table_alter_column_alter_generated', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + columnGenerated, + }); + } + + if (column.generated?.type === 'deleted') { + if (columnGenerated?.type === 'virtual') { + // TODO: Change warning message according to SingleStore docs + warning( + `You are trying to remove virtual generated constraint from ${ + chalk.blue( + columnName, + ) + } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. 
This means that this column will have no data after migration\n`, + ); + } + statements.push({ + type: 'alter_table_alter_column_drop_generated', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + columnGenerated, + oldColumn: json1.tables[tableName].columns[columnName], + }); + } + + if ( + column.primaryKey?.type === 'added' + || (column.primaryKey?.type === 'changed' && column.primaryKey.new) + ) { + const wasAutoincrement = statements.filter( + (it) => it.type === 'alter_table_alter_column_set_autoincrement', + ); + if (wasAutoincrement.length === 0) { + setPkStatements.push({ + type: 'alter_table_alter_column_set_pk', + tableName, + schema, + columnName, + }); + } + } + + if (column.onUpdate?.type === 'added') { + statements.push({ + type: 'alter_table_alter_column_set_on_update', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + + if (column.onUpdate?.type === 'deleted') { + statements.push({ + type: 'alter_table_alter_column_drop_on_update', + tableName, + columnName, + schema, + newDataType: columnType, + columnDefault, + columnOnUpdate, + columnNotNull, + columnAutoIncrement, + columnPk, + }); + } + } + + return [...dropPkStatements, ...setPkStatements, ...statements]; +}; + export const preparePgAlterColumns = ( _tableName: string, schema: string, @@ -2704,8 +3101,11 @@ export const prepareAddCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, + // TODO: remove? + json2: PgSchema, ): JsonCreateCompositePK[] => { return Object.values(pks).map((it) => { + const unsquashed = PgSquasher.unsquashPK(it); return { type: 'create_composite_pk', tableName, @@ -2720,6 +3120,8 @@ export const prepareDeleteCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, + // TODO: remove? + json1: PgSchema, ): JsonDeleteCompositePK[] => { return Object.values(pks).map((it) => { return { @@ -2736,6 +3138,9 @@ export const prepareAlterCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, + // TODO: remove? 
+ json1: PgSchema, + json2: PgSchema, ): JsonAlterCompositePK[] => { return Object.values(pks).map((it) => { return { @@ -2876,7 +3281,6 @@ export const prepareDeleteCompositePrimaryKeyMySql = ( type: 'delete_composite_pk', tableName, data: it, - constraintName: unsquashed.name, } as JsonDeleteCompositePK; }); }; @@ -2945,6 +3349,24 @@ export const prepareMySqlCreateViewJson = ( }; }; +/* export const prepareSingleStoreCreateViewJson = ( + name: string, + definition: string, + meta: string, + replace: boolean = false, +): JsonCreateSingleStoreViewStatement => { + const { algorithm, sqlSecurity, withCheckOption } = SingleStoreSquasher.unsquashView(meta); + return { + type: 'singlestore_create_view', + name: name, + definition: definition, + algorithm, + sqlSecurity, + withCheckOption, + replace, + }; +}; */ + export const prepareSqliteCreateViewJson = ( name: string, definition: string, @@ -3070,3 +3492,9 @@ export const prepareMySqlAlterView = ( ): JsonAlterMySqlViewStatement => { return { type: 'alter_mysql_view', ...view }; }; + +/* export const prepareSingleStoreAlterView = ( + view: Omit, +): JsonAlterSingleStoreViewStatement => { + return { type: 'alter_singlestore_view', ...view }; +}; */ diff --git a/drizzle-kit/src/migrationPreparator.ts b/drizzle-kit/src/migrationPreparator.ts index d61f804ca..4e67e8174 100644 --- a/drizzle-kit/src/migrationPreparator.ts +++ b/drizzle-kit/src/migrationPreparator.ts @@ -1,9 +1,10 @@ import { randomUUID } from 'crypto'; import fs from 'fs'; import { CasingType } from './cli/validations/common'; -import { serializeMySql, serializePg, serializeSQLite } from './serializer'; +import { serializeMySql, serializePg, serializeSingleStore, serializeSQLite } from './serializer'; import { dryMySql, MySqlSchema, mysqlSchema } from './serializer/mysqlSchema'; import { dryPg, PgSchema, pgSchema, PgSchemaInternal } from './serializer/pgSchema'; +import { drySingleStore, SingleStoreSchema, singlestoreSchema } from './serializer/singlestoreSchema'; import { drySQLite, SQLiteSchema, sqliteSchema } from './serializer/sqliteSchema'; export const prepareMySqlDbPushSnapshot = async ( @@ -22,6 +23,22 @@ export const prepareMySqlDbPushSnapshot = async ( return { prev, cur: result }; }; +export const prepareSingleStoreDbPushSnapshot = async ( + prev: SingleStoreSchema, + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema }> => { + const serialized = await serializeSingleStore(schemaPath, casing); + + const id = randomUUID(); + const idPrev = prev.id; + + const { version, dialect, ...rest } = serialized; + const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; + + return { prev, cur: result }; +}; + export const prepareSQLiteDbPushSnapshot = async ( prev: SQLiteSchema, schemaPath: string | string[], @@ -89,6 +106,34 @@ export const prepareMySqlMigrationSnapshot = async ( return { prev: prevSnapshot, cur: result, custom }; }; +export const prepareSingleStoreMigrationSnapshot = async ( + migrationFolders: string[], + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema; custom: SingleStoreSchema }> => { + const prevSnapshot = singlestoreSchema.parse( + preparePrevSnapshot(migrationFolders, drySingleStore), + ); + const serialized = await serializeSingleStore(schemaPath, casing); + + const id = randomUUID(); + const idPrev = prevSnapshot.id; + + const { version, dialect, ...rest } = serialized; + 
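+	/* Every snapshot receives a fresh id and stores the id of the snapshot it was generated
+	   from as prevId, so the migration folder forms a chain that starts at the dry snapshot
+	   (originUUID) when no previous snapshot exists. */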
const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; + + const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: SingleStoreSchema = { + id, + prevId: idPrev, + ...prevRest, + }; + + return { prev: prevSnapshot, cur: result, custom }; +}; + export const prepareSqliteMigrationSnapshot = async ( snapshots: string[], schemaPath: string | string[], diff --git a/drizzle-kit/src/schemaValidator.ts b/drizzle-kit/src/schemaValidator.ts index 6ad29a544..e91b5ab11 100644 --- a/drizzle-kit/src/schemaValidator.ts +++ b/drizzle-kit/src/schemaValidator.ts @@ -1,9 +1,10 @@ import { enum as enumType, TypeOf, union } from 'zod'; import { mysqlSchema, mysqlSchemaSquashed } from './serializer/mysqlSchema'; import { pgSchema, pgSchemaSquashed } from './serializer/pgSchema'; +import { singlestoreSchema, singlestoreSchemaSquashed } from './serializer/singlestoreSchema'; import { sqliteSchema, SQLiteSchemaSquashed } from './serializer/sqliteSchema'; -export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso'] as const; +export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso', 'singlestore'] as const; export const dialect = enumType(dialects); export type Dialect = (typeof dialects)[number]; @@ -13,9 +14,10 @@ const commonSquashedSchema = union([ pgSchemaSquashed, mysqlSchemaSquashed, SQLiteSchemaSquashed, + singlestoreSchemaSquashed, ]); -const commonSchema = union([pgSchema, mysqlSchema, sqliteSchema]); +const commonSchema = union([pgSchema, mysqlSchema, sqliteSchema, singlestoreSchema]); export type CommonSquashedSchema = TypeOf; export type CommonSchema = TypeOf; diff --git a/drizzle-kit/src/serializer/index.ts b/drizzle-kit/src/serializer/index.ts index cf2ee625a..d24afbab0 100644 --- a/drizzle-kit/src/serializer/index.ts +++ b/drizzle-kit/src/serializer/index.ts @@ -6,6 +6,7 @@ import { CasingType } from 'src/cli/validations/common'; import { error } from '../cli/views'; import type { MySqlSchemaInternal } from './mysqlSchema'; import type { PgSchemaInternal } from './pgSchema'; +import { SingleStoreSchemaInternal } from './singlestoreSchema'; import type { SQLiteSchemaInternal } from './sqliteSchema'; export const serializeMySql = async ( @@ -53,6 +54,22 @@ export const serializeSQLite = async ( return generateSqliteSnapshot(tables, views, casing); }; +export const serializeSingleStore = async ( + path: string | string[], + casing: CasingType | undefined, +): Promise => { + const filenames = prepareFilenames(path); + + console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); + + const { prepareFromSingleStoreImports } = await import('./singlestoreImports'); + const { generateSingleStoreSnapshot } = await import('./singlestoreSerializer'); + + const { tables /* views */ } = await prepareFromSingleStoreImports(filenames); + + return generateSingleStoreSnapshot(tables, /* views, */ casing); +}; + export const prepareFilenames = (path: string | string[]) => { if (typeof path === 'string') { path = [path]; diff --git a/drizzle-kit/src/serializer/singlestoreImports.ts b/drizzle-kit/src/serializer/singlestoreImports.ts new file mode 100644 index 000000000..23c2d66a9 --- /dev/null +++ b/drizzle-kit/src/serializer/singlestoreImports.ts @@ -0,0 +1,38 @@ +import { is } from 'drizzle-orm'; +import { AnySingleStoreTable, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { safeRegister } from '../cli/commands/utils'; + +export 
const prepareFromExports = (exports: Record) => { + const tables: AnySingleStoreTable[] = []; + /* const views: SingleStoreView[] = []; */ + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, SingleStoreTable)) { + tables.push(t); + } + + /* if (is(t, SingleStoreView)) { + views.push(t); + } */ + }); + + return { tables /* views */ }; +}; + +export const prepareFromSingleStoreImports = async (imports: string[]) => { + const tables: AnySingleStoreTable[] = []; + /* const views: SingleStoreView[] = []; */ + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + const i0: Record = require(`${it}`); + const prepared = prepareFromExports(i0); + + tables.push(...prepared.tables); + /* views.push(...prepared.views); */ + } + unregister(); + return { tables: Array.from(new Set(tables)) /* , views */ }; +}; diff --git a/drizzle-kit/src/serializer/singlestoreSchema.ts b/drizzle-kit/src/serializer/singlestoreSchema.ts new file mode 100644 index 000000000..9ff45ef5a --- /dev/null +++ b/drizzle-kit/src/serializer/singlestoreSchema.ts @@ -0,0 +1,257 @@ +import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; +import { mapValues, originUUID, snapshotVersion } from '../global'; + +// ------- V3 -------- +const index = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + using: enumType(['btree', 'hash']).optional(), + algorithm: enumType(['default', 'inplace', 'copy']).optional(), + lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + autoincrement: boolean().optional(), + default: any().optional(), + onUpdate: any().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), + as: string(), + }).optional(), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const viewMeta = object({ + algorithm: enumType(['undefined', 'merge', 'temptable']), + sqlSecurity: enumType(['definer', 'invoker']), + withCheckOption: enumType(['local', 'cascaded']).optional(), +}).strict(); + +/* export const view = object({ + name: string(), + columns: record(string(), column), + definition: string().optional(), + isExisting: boolean(), +}).strict().merge(viewMeta); +type SquasherViewMeta = Omit, 'definer'>; */ + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ isDefaultAnExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), + indexes: record( + string(), + object({ + columns: record( + string(), + object({ isExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), +}).optional(); + +// use main dialect +const dialect = literal('singlestore'); + +const schemaHash = object({ + id: string(), + prevId: string(), +}); + +export const schemaInternal = object({ + version: literal('1'), + dialect: dialect, + tables: record(string(), table), + /* views: record(string(), 
view).default({}), */ + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schema = schemaInternal.merge(schemaHash); + +const tableSquashed = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()).default({}), +}).strict(); + +/* const viewSquashed = view.omit({ + algorithm: true, + sqlSecurity: true, + withCheckOption: true, +}).extend({ meta: string() }); */ + +export const schemaSquashed = object({ + version: literal('1'), + dialect: dialect, + tables: record(string(), tableSquashed), + /* views: record(string(), viewSquashed), */ +}).strict(); + +export type Dialect = TypeOf; +export type Column = TypeOf; +export type Table = TypeOf; +export type SingleStoreSchema = TypeOf; +export type SingleStoreSchemaInternal = TypeOf; +export type SingleStoreKitInternals = TypeOf; +export type SingleStoreSchemaSquashed = TypeOf; +export type Index = TypeOf; +export type PrimaryKey = TypeOf; +export type UniqueConstraint = TypeOf; +/* export type View = TypeOf; */ +/* export type ViewSquashed = TypeOf; */ + +export const SingleStoreSquasher = { + squashIdx: (idx: Index) => { + index.parse(idx); + return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.using ?? ''};${idx.algorithm ?? ''};${ + idx.lock ?? '' + }`; + }, + unsquashIdx: (input: string): Index => { + const [name, columnsString, isUnique, using, algorithm, lock] = input.split(';'); + const destructed = { + name, + columns: columnsString.split(','), + isUnique: isUnique === 'true', + using: using ? using : undefined, + algorithm: algorithm ? algorithm : undefined, + lock: lock ? lock : undefined, + }; + return index.parse(destructed); + }, + squashPK: (pk: PrimaryKey) => { + return `${pk.name};${pk.columns.join(',')}`; + }, + unsquashPK: (pk: string): PrimaryKey => { + const splitted = pk.split(';'); + return { name: splitted[0], columns: splitted[1].split(',') }; + }, + squashUnique: (unq: UniqueConstraint) => { + return `${unq.name};${unq.columns.join(',')}`; + }, + unsquashUnique: (unq: string): UniqueConstraint => { + const [name, columns] = unq.split(';'); + return { name, columns: columns.split(',') }; + }, + /* squashView: (view: View): string => { + return `${view.algorithm};${view.sqlSecurity};${view.withCheckOption}`; + }, + unsquashView: (meta: string): SquasherViewMeta => { + const [algorithm, sqlSecurity, withCheckOption] = meta.split(';'); + const toReturn = { + algorithm: algorithm, + sqlSecurity: sqlSecurity, + withCheckOption: withCheckOption !== 'undefined' ? 
withCheckOption : undefined, + }; + + return viewMeta.parse(toReturn); + }, */ +}; + +export const squashSingleStoreScheme = (json: SingleStoreSchema): SingleStoreSchemaSquashed => { + const mappedTables = Object.fromEntries( + Object.entries(json.tables).map((it) => { + const squashedIndexes = mapValues(it[1].indexes, (index) => { + return SingleStoreSquasher.squashIdx(index); + }); + + const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { + return SingleStoreSquasher.squashPK(pk); + }); + + const squashedUniqueConstraints = mapValues( + it[1].uniqueConstraints, + (unq) => { + return SingleStoreSquasher.squashUnique(unq); + }, + ); + + return [ + it[0], + { + name: it[1].name, + columns: it[1].columns, + indexes: squashedIndexes, + compositePrimaryKeys: squashedPKs, + uniqueConstraints: squashedUniqueConstraints, + }, + ]; + }), + ); + + /* const mappedViews = Object.fromEntries( + Object.entries(json.views).map(([key, value]) => { + const meta = SingleStoreSquasher.squashView(value); + + return [key, { + name: value.name, + isExisting: value.isExisting, + columns: value.columns, + definition: value.definition, + meta, + }]; + }), + ); */ + + return { + version: '1', + dialect: json.dialect, + tables: mappedTables, + /* views: mappedViews, */ + }; +}; + +export const singlestoreSchema = schema; +export const singlestoreSchemaSquashed = schemaSquashed; + +// no prev version +export const backwardCompatibleSingleStoreSchema = union([singlestoreSchema, schema]); + +export const drySingleStore = singlestoreSchema.parse({ + version: '1', + dialect: 'singlestore', + id: originUUID, + prevId: '', + tables: {}, + schemas: {}, + /* views: {}, */ + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, +}); diff --git a/drizzle-kit/src/serializer/singlestoreSerializer.ts b/drizzle-kit/src/serializer/singlestoreSerializer.ts new file mode 100644 index 000000000..e8c89f1d1 --- /dev/null +++ b/drizzle-kit/src/serializer/singlestoreSerializer.ts @@ -0,0 +1,767 @@ +import chalk from 'chalk'; +import { is, SQL } from 'drizzle-orm'; +import { + AnySingleStoreTable, + getTableConfig, + type PrimaryKey as PrimaryKeyORM, + SingleStoreDialect, + uniqueKeyName, +} from 'drizzle-orm/singlestore-core'; +import { RowDataPacket } from 'mysql2/promise'; +import { withStyle } from '../cli/validations/outputs'; +import { IntrospectStage, IntrospectStatus } from '../cli/views'; + +import { CasingType } from 'src/cli/validations/common'; +import type { DB } from '../utils'; +import { + Column, + Index, + PrimaryKey, + SingleStoreKitInternals, + SingleStoreSchemaInternal, + Table, + UniqueConstraint, +} from './singlestoreSchema'; +import { sqlToStr } from './utils'; + +const dialect = new SingleStoreDialect(); + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +export const generateSingleStoreSnapshot = ( + tables: AnySingleStoreTable[], + /* views: SingleStoreView[], */ + casing: CasingType | undefined, +): SingleStoreSchemaInternal => { + const dialect = new SingleStoreDialect({ casing }); + const result: Record = {}; + /* const resultViews: Record = {}; */ + const internal: SingleStoreKitInternals = { tables: {}, indexes: {} }; + for (const table of tables) { + const { + name: tableName, + columns, + indexes, + schema, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + const columnsObject: Record = {}; + const indexesObject: Record = {}; + const primaryKeysObject: Record = {}; + const 
uniqueConstraintObject: Record = {}; + + columns.forEach((column) => { + const notNull: boolean = column.notNull; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey: false, + // If field is autoincrement it's notNull by default + // notNull: autoIncrement ? true : notNull, + notNull, + autoincrement: autoIncrement, + onUpdate: (column as any).hasOnUpdateNow, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: generated.mode ?? 'stored', + } + : undefined, + }; + + if (column.primary) { + primaryKeysObject[`${tableName}_${column.name}`] = { + name: `${tableName}_${column.name}`, + columns: [column.name], + }; + } + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. + The unique constraint ${ + chalk.underline.blue( + column.uniqueName, + ) + } on the ${ + chalk.underline.blue( + column.name, + ) + } column is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] = { + name: column.uniqueName!, + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if ( + sqlTypeLowered.startsWith('datetime') + || sqlTypeLowered.startsWith('timestamp') + ) { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } + } else { + columnToSet.default = column.default; + } + } + // if (['blob', 'text', 'json'].includes(column.getSQLType())) { + // columnToSet.default = `(${columnToSet.default})`; + // } + } + } + columnsObject[column.name] = columnToSet; + }); + + primaryKeys.map((pk: PrimaryKeyORM) => { + const columnNames = pk.columns.map((c: any) => c.name); + primaryKeysObject[pk.getName()] = { + name: pk.getName(), + columns: columnNames, + }; + + // all composite pk's should be treated as notNull + for (const column of pk.columns) { + columnsObject[column.name].notNull = true; + } + }); + + uniqueConstraints?.map((unq) => { + const columnNames = unq.columns.map((c) => c.name); + + const name = unq.name ?? uniqueKeyName(table, columnNames); + + const existingUnique = uniqueConstraintObject[name]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. 
\nThe unique constraint ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + + uniqueConstraintObject[name] = { + name: unq.name!, + columns: columnNames, + }; + }); + + indexes.forEach((value) => { + const columns = value.config.columns; + const name = value.config.name; + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + if (typeof internal!.indexes![name] === 'undefined') { + internal!.indexes![name] = { + columns: { + [sql]: { + isExpression: true, + }, + }, + }; + } else { + if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { + internal!.indexes![name]!.columns[sql] = { + isExpression: true, + }; + } else { + internal!.indexes![name]!.columns[sql]!.isExpression = true; + } + } + return sql; + } else { + return `${it.name}`; + } + }); + + if (value.config.unique) { + if (typeof uniqueConstraintObject[name] !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. \nThe unique index ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + indexColumns.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + uniqueConstraintObject[name].columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + } + + indexesObject[name] = { + name, + columns: indexColumns, + isUnique: value.config.unique ?? false, + using: value.config.using, + algorithm: value.config.algorythm, + lock: value.config.lock, + }; + }); + + // only handle tables without schemas + if (!schema) { + result[tableName] = { + name: tableName, + columns: columnsObject, + indexes: indexesObject, + compositePrimaryKeys: primaryKeysObject, + uniqueConstraints: uniqueConstraintObject, + }; + } + } + + /* for (const view of views) { + const { + isExisting, + name, + query, + schema, + selectedFields, + algorithm, + sqlSecurity, + withCheckOption, + } = getViewConfig(view); + + const columnsObject: Record = {}; + + const existingView = resultViews[name]; + if (typeof existingView !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated view name across ${ + chalk.underline.blue( + schema ?? 'public', + ) + } schema. Please rename your view`, + ) + }`, + ); + process.exit(1); + } + for (const key in selectedFields) { + if (is(selectedFields[key], SingleStoreColumn)) { + const column = selectedFields[key]; + + const notNull: boolean = column.notNull; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey: false, + // If field is autoincrement it's notNull by default + // notNull: autoIncrement ? true : notNull, + notNull, + autoincrement: autoIncrement, + onUpdate: (column as any).hasOnUpdateNow, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? 
dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: generated.mode ?? 'stored', + } + : undefined, + }; + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if ( + sqlTypeLowered.startsWith('datetime') + || sqlTypeLowered.startsWith('timestamp') + ) { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } + } else { + columnToSet.default = column.default; + } + } + } + } + columnsObject[column.name] = columnToSet; + } + } + + resultViews[name] = { + columns: columnsObject, + name, + isExisting, + definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, + withCheckOption, + algorithm: algorithm ?? 'undefined', // set default values + sqlSecurity: sqlSecurity ?? 'definer', // set default values + }; + } */ + + return { + version: '1', + dialect: 'singlestore', + tables: result, + /* views: resultViews, */ + _meta: { + tables: {}, + columns: {}, + }, + internal, + }; +}; + +function clearDefaults(defaultValue: any, collate: string) { + if (typeof collate === 'undefined' || collate === null) { + collate = `utf8mb4`; + } + + let resultDefault = defaultValue; + collate = `_${collate}`; + if (defaultValue.startsWith(collate)) { + resultDefault = resultDefault + .substring(collate.length, defaultValue.length) + .replace(/\\/g, ''); + if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { + return `('${resultDefault.substring(1, resultDefault.length - 1)}')`; + } else { + return `'${resultDefault}'`; + } + } else { + return `(${resultDefault})`; + } +} + +export const fromDatabase = async ( + db: DB, + inputSchema: string, + tablesFilter: (table: string) => boolean = (table) => true, + progressCallback?: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void, +): Promise => { + const result: Record = {}; + const internals: SingleStoreKitInternals = { tables: {}, indexes: {} }; + + const columns = await db.query(`select * from information_schema.columns + where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations' + order by table_name, ordinal_position;`); + + const response = columns as RowDataPacket[]; + + const schemas: string[] = []; + + let columnsCount = 0; + let tablesCount = new Set(); + let indexesCount = 0; + /* let viewsCount = 0; */ + + const idxs = await db.query( + `select * from INFORMATION_SCHEMA.STATISTICS + WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`, + ); + + const idxRows = idxs as RowDataPacket[]; + + for (const column of response) { + if (!tablesFilter(column['TABLE_NAME'] as string)) continue; + + columnsCount += 1; + if (progressCallback) { + progressCallback('columns', columnsCount, 'fetching'); + } + const schema: string = column['TABLE_SCHEMA']; + const tableName = column['TABLE_NAME']; + + tablesCount.add(`${schema}.${tableName}`); + if (progressCallback) { + progressCallback('columns', tablesCount.size, 'fetching'); + } + const columnName: string = column['COLUMN_NAME']; + const 
isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' + const dataType = column['DATA_TYPE']; // varchar + const columnType = column['COLUMN_TYPE']; // varchar(256) + // const columnType = column["DATA_TYPE"]; + const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' + let columnDefault: string | null = column['COLUMN_DEFAULT']; + const collation: string = column['CHARACTER_SET_NAME']; + const geenratedExpression: string = column['GENERATION_EXPRESSION']; + + let columnExtra = column['EXTRA']; + let isAutoincrement = false; // 'auto_increment', '' + let isDefaultAnExpression = false; // 'auto_increment', '' + + if (typeof column['EXTRA'] !== 'undefined') { + columnExtra = column['EXTRA']; + isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', '' + isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', '' + } + + // if (isPrimary) { + // if (typeof tableToPk[tableName] === "undefined") { + // tableToPk[tableName] = [columnName]; + // } else { + // tableToPk[tableName].push(columnName); + // } + // } + + if (schema !== inputSchema) { + schemas.push(schema); + } + + const table = result[tableName]; + + // let changedType = columnType.replace("bigint unsigned", "serial") + let changedType = columnType; + + if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { + // check unique here + const uniqueIdx = idxRows.filter( + (it) => + it['COLUMN_NAME'] === columnName + && it['TABLE_NAME'] === tableName + && it['NON_UNIQUE'] === 0, + ); + if (uniqueIdx && uniqueIdx.length === 1) { + changedType = columnType.replace('bigint unsigned', 'serial'); + } + } + + if ( + columnType.startsWith('bigint(') + || columnType.startsWith('tinyint(') + || columnType.startsWith('date(') + || columnType.startsWith('int(') + || columnType.startsWith('mediumint(') + || columnType.startsWith('smallint(') + || columnType.startsWith('text(') + || columnType.startsWith('time(') + || columnType.startsWith('year(') + ) { + changedType = columnType.replace(/\(\s*[^)]*\)$/, ''); + } + + if (columnType.includes('decimal(10,0)')) { + changedType = columnType.replace('decimal(10,0)', 'decimal'); + } + + if (columnDefault?.endsWith('.')) { + columnDefault = columnDefault.slice(0, -1); + } + + let onUpdate: boolean | undefined = undefined; + if ( + columnType.startsWith('timestamp') + && typeof columnExtra !== 'undefined' + && columnExtra.includes('on update CURRENT_TIMESTAMP') + ) { + onUpdate = true; + } + + const newColumn: Column = { + default: columnDefault === null + ? undefined + : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) + && !['decimal', 'char', 'varchar'].some((type) => columnType.startsWith(type)) + ? Number(columnDefault) + : isDefaultAnExpression + ? clearDefaults(columnDefault, collation) + : columnDefault.startsWith('CURRENT_TIMESTAMP') + ? 'CURRENT_TIMESTAMP' + : `'${columnDefault}'`, + autoincrement: isAutoincrement, + name: columnName, + type: changedType, + primaryKey: false, + notNull: !isNullable, + onUpdate, + generated: geenratedExpression + ? { + as: geenratedExpression, + type: columnExtra === 'VIRTUAL GENERATED' ? 
'virtual' : 'stored', + } + : undefined, + }; + + // Set default to internal object + if (isDefaultAnExpression) { + if (typeof internals!.tables![tableName] === 'undefined') { + internals!.tables![tableName] = { + columns: { + [columnName]: { + isDefaultAnExpression: true, + }, + }, + }; + } else { + if ( + typeof internals!.tables![tableName]!.columns[columnName] + === 'undefined' + ) { + internals!.tables![tableName]!.columns[columnName] = { + isDefaultAnExpression: true, + }; + } else { + internals!.tables![tableName]!.columns[ + columnName + ]!.isDefaultAnExpression = true; + } + } + } + + if (!table) { + result[tableName] = { + name: tableName, + columns: { + [columnName]: newColumn, + }, + compositePrimaryKeys: {}, + indexes: {}, + uniqueConstraints: {}, + }; + } else { + result[tableName]!.columns[columnName] = newColumn; + } + } + + const tablePks = await db.query( + `SELECT table_name, column_name, ordinal_position + FROM information_schema.table_constraints t + LEFT JOIN information_schema.key_column_usage k + USING(constraint_name,table_schema,table_name) + WHERE t.constraint_type='UNIQUE' + and table_name != '__drizzle_migrations' + AND t.table_schema = '${inputSchema}' + ORDER BY ordinal_position`, + ); + + const tableToPk: { [tname: string]: string[] } = {}; + + const tableToPkRows = tablePks as RowDataPacket[]; + for (const tableToPkRow of tableToPkRows) { + const tableName: string = tableToPkRow['table_name']; + const columnName: string = tableToPkRow['column_name']; + const position: string = tableToPkRow['ordinal_position']; + + if (typeof result[tableName] === 'undefined') { + continue; + } + + if (typeof tableToPk[tableName] === 'undefined') { + tableToPk[tableName] = [columnName]; + } else { + tableToPk[tableName].push(columnName); + } + } + + for (const [key, value] of Object.entries(tableToPk)) { + // if (value.length > 1) { + result[key].compositePrimaryKeys = { + [`${key}_${value.join('_')}`]: { + name: `${key}_${value.join('_')}`, + columns: value, + }, + }; + // } else if (value.length === 1) { + // result[key].columns[value[0]].primaryKey = true; + // } else { + // } + } + if (progressCallback) { + progressCallback('columns', columnsCount, 'done'); + progressCallback('tables', tablesCount.size, 'done'); + } + + for (const idxRow of idxRows) { + const tableSchema = idxRow['TABLE_SCHEMA']; + const tableName = idxRow['TABLE_NAME']; + const constraintName = idxRow['INDEX_NAME']; + const columnName: string = idxRow['COLUMN_NAME']; + const isUnique = idxRow['NON_UNIQUE'] === 0; + + const tableInResult = result[tableName]; + if (typeof tableInResult === 'undefined') continue; + + // if (tableInResult.columns[columnName].type === "serial") continue; + + indexesCount += 1; + if (progressCallback) { + progressCallback('indexes', indexesCount, 'fetching'); + } + + if (isUnique) { + if ( + typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined' + ) { + tableInResult.uniqueConstraints[constraintName]!.columns.push( + columnName, + ); + } else { + tableInResult.uniqueConstraints[constraintName] = { + name: constraintName, + columns: [columnName], + }; + } + } + } + + /* const views = await db.query( + `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${inputSchema}';`, + ); */ + + /* const resultViews: Record = {}; */ + + /* viewsCount = views.length; + if (progressCallback) { + progressCallback('views', viewsCount, 'fetching'); + } + for await (const view of views) { + const viewName = view['TABLE_NAME']; + const definition = 
view['VIEW_DEFINITION']; + + const withCheckOption = view['CHECK_OPTION'] === 'NONE' + ? undefined + : view['CHECK_OPTION'].toLowerCase(); + const sqlSecurity = view['SECURITY_TYPE'].toLowerCase(); + + const [createSqlStatement] = await db.query( + `SHOW CREATE VIEW \`${viewName}\`;`, + ); + const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/); + const algorithm = algorithmMatch + ? algorithmMatch[1].toLowerCase() + : undefined; + + const columns = result[viewName].columns; + delete result[viewName]; + + resultViews[viewName] = { + columns: columns, + isExisting: false, + name: viewName, + algorithm, + definition, + sqlSecurity, + withCheckOption, + }; + } */ + + if (progressCallback) { + progressCallback('indexes', indexesCount, 'done'); + // progressCallback("enums", 0, "fetching"); + progressCallback('enums', 0, 'done'); + } + + return { + version: '1', + dialect: 'singlestore', + tables: result, + /* views: resultViews, */ + _meta: { + tables: {}, + columns: {}, + }, + internal: internals, + }; +}; diff --git a/drizzle-kit/src/serializer/studio.ts b/drizzle-kit/src/serializer/studio.ts index d83a65b08..bbd811627 100644 --- a/drizzle-kit/src/serializer/studio.ts +++ b/drizzle-kit/src/serializer/studio.ts @@ -15,6 +15,11 @@ import { } from 'drizzle-orm'; import { AnyMySqlTable, getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; import { AnyPgTable, getTableConfig as pgTableConfig, PgTable } from 'drizzle-orm/pg-core'; +import { + AnySingleStoreTable, + getTableConfig as singlestoreTableConfig, + SingleStoreTable, +} from 'drizzle-orm/singlestore-core'; import { AnySQLiteTable, getTableConfig as sqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; import fs from 'fs'; import { Hono } from 'hono'; @@ -28,6 +33,7 @@ import { z } from 'zod'; import { safeRegister } from '../cli/commands/utils'; import type { MysqlCredentials } from '../cli/validations/mysql'; import type { PostgresCredentials } from '../cli/validations/postgres'; +import type { SingleStoreCredentials } from '../cli/validations/singlestore'; import type { SqliteCredentials } from '../cli/validations/sqlite'; import { prepareFilenames } from '.'; @@ -45,7 +51,7 @@ type SchemaFile = { export type Setup = { dbHash: string; - dialect: 'postgresql' | 'mysql' | 'sqlite'; + dialect: 'postgresql' | 'mysql' | 'sqlite' | 'singlestore'; driver?: 'aws-data-api' | 'd1-http' | 'turso' | 'pglite'; proxy: (params: ProxyParams) => Promise; customDefaults: CustomDefault[]; @@ -172,6 +178,43 @@ export const prepareSQLiteSchema = async (path: string | string[]) => { return { schema: sqliteSchema, relations, files }; }; +export const prepareSingleStoreSchema = async (path: string | string[]) => { + const imports = prepareFilenames(path); + const singlestoreSchema: Record> = { + public: {}, + }; + const relations: Record = {}; + + // files content as string + const files = imports.map((it, index) => ({ + // get the file name from the path + name: it.split('/').pop() || `schema${index}.ts`, + content: fs.readFileSync(it, 'utf-8'), + })); + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); + + i0values.forEach(([k, t]) => { + if (is(t, SingleStoreTable)) { + const schema = singlestoreTableConfig(t).schema || 'public'; + singlestoreSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + unregister(); 
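+ // return the collected SingleStore tables, relations and raw schema file contents for Studio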
+ + return { schema: singlestoreSchema, relations, files }; +}; + const getCustomDefaults = >( schema: Record>, ): CustomDefault[] => { @@ -187,8 +230,10 @@ const getCustomDefaults = >( tableConfig = pgTableConfig(table); } else if (is(table, MySqlTable)) { tableConfig = mysqlTableConfig(table); - } else { + } else if (is(table, SQLiteTable)) { tableConfig = sqliteTableConfig(table); + } else { + tableConfig = singlestoreTableConfig(table); } tableConfig.columns.map((column) => { @@ -346,6 +391,39 @@ export const drizzleForLibSQL = async ( }; }; +export const drizzleForSingleStore = async ( + credentials: SingleStoreCredentials, + singlestoreSchema: Record>, + relations: Record, + schemaFiles?: SchemaFile[], +): Promise => { + const { connectToSingleStore } = await import('../cli/connections'); + const { proxy } = await connectToSingleStore(credentials); + + const customDefaults = getCustomDefaults(singlestoreSchema); + + let dbUrl: string; + + if ('url' in credentials) { + dbUrl = credentials.url; + } else { + dbUrl = + `singlestore://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; + } + + const dbHash = createHash('sha256').update(dbUrl).digest('hex'); + + return { + dbHash, + dialect: 'singlestore', + proxy, + customDefaults, + schema: singlestoreSchema, + relations, + schemaFiles, + }; +}; + export const extractRelations = (tablesConfig: { tables: TablesRelationalConfig; tableNamesMap: Record; @@ -371,6 +449,8 @@ export const extractRelations = (tablesConfig: { refSchema = mysqlTableConfig(refTable).schema; } else if (is(refTable, SQLiteTable)) { refSchema = undefined; + } else if (is(refTable, SingleStoreTable)) { + refSchema = singlestoreTableConfig(refTable).schema; } else { throw new Error('unsupported dialect'); } diff --git a/drizzle-kit/src/snapshotsDiffer.ts b/drizzle-kit/src/snapshotsDiffer.ts index 060f12bbd..2db4ad02c 100644 --- a/drizzle-kit/src/snapshotsDiffer.ts +++ b/drizzle-kit/src/snapshotsDiffer.ts @@ -117,6 +117,7 @@ import { prepareRenameSequenceJson, prepareRenameTableJson, prepareRenameViewJson, + prepareSingleStoreCreateTableJson, prepareSqliteAlterColumns, prepareSQLiteCreateTable, prepareSqliteCreateViewJson, @@ -131,12 +132,14 @@ import { PgSchemaSquashed, PgSquasher, Policy, + policy, policySquashed, Role, roleSchema, sequenceSquashed, View, } from './serializer/pgSchema'; +import { SingleStoreSchema, SingleStoreSchemaSquashed, SingleStoreSquasher } from './serializer/singlestoreSchema'; import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher, View as SqliteView } from './serializer/sqliteSchema'; import { libSQLCombineStatements, sqliteCombineStatements } from './statementCombiner'; import { copy, prepareMigrationMeta } from './utils'; @@ -393,6 +396,11 @@ export const diffResultSchemeMysql = object({ alteredViews: alteredMySqlViewSchema.array(), }); +export const diffResultSchemeSingleStore = object({ + alteredTablesWithColumns: alteredTableScheme.array(), + alteredEnums: never().array(), +}); + export const diffResultSchemeSQLite = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: never().array(), @@ -407,6 +415,7 @@ export type Table = TypeOf; export type AlteredTable = TypeOf; export type DiffResult = TypeOf; export type DiffResultMysql = TypeOf; +export type DiffResultSingleStore = TypeOf; export type DiffResultSQLite = TypeOf; export interface ResolverInput { @@ -1267,17 +1276,21 @@ export const applyPgSnapshotsDiff = async ( it.name, it.schema, 
it.addedCompositePKs, + curFull as PgSchema, ); deletedCompositePKs = prepareDeleteCompositePrimaryKeyPg( it.name, it.schema, it.deletedCompositePKs, + prevFull as PgSchema, ); } alteredCompositePKs = prepareAlterCompositePrimaryKeyPg( it.name, it.schema, it.alteredCompositePKs, + prevFull as PgSchema, + curFull as PgSchema, ); // add logic for unique constraints @@ -2673,6 +2686,519 @@ export const applyMysqlSnapshotsDiff = async ( }; }; +export const applySingleStoreSnapshotsDiff = async ( + json1: SingleStoreSchemaSquashed, + json2: SingleStoreSchemaSquashed, + tablesResolver: ( + input: ResolverInput, + ) => Promise>, + columnsResolver: ( + input: ColumnsResolverInput, + ) => Promise>, + /* viewsResolver: ( + input: ResolverInput, + ) => Promise>, */ + prevFull: SingleStoreSchema, + curFull: SingleStoreSchema, + action?: 'push' | undefined, +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + _meta: + | { + schemas: {}; + tables: {}; + columns: {}; + } + | undefined; +}> => { + // squash indexes and fks + + // squash uniqueIndexes and uniqueConstraint into constraints object + // it should be done for singlestore only because it has no diffs for it + + // TODO: @AndriiSherman + // Add an upgrade to v6 and move all snaphosts to this strcutre + // After that we can generate singlestore in 1 object directly(same as sqlite) + for (const tableName in json1.tables) { + const table = json1.tables[tableName]; + for (const indexName in table.indexes) { + const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json1.tables[tableName].indexes[index.name]; + } + } + } + + for (const tableName in json2.tables) { + const table = json2.tables[tableName]; + for (const indexName in table.indexes) { + const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json2.tables[tableName].indexes[index.name]; + } + } + } + + const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); + + const { + created: createdTables, + deleted: deletedTables, + renamed: renamedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.added, + deleted: tablesDiff.deleted, + }); + + const tablesPatchedSnap1 = copy(json1); + tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { + const { name } = nameChangeFor(it, renamedTables); + it.name = name; + return [name, it]; + }); + + const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); + const columnRenames = [] as { + table: string; + renames: { from: Column; to: Column }[]; + }[]; + + const columnCreates = [] as { + table: string; + columns: Column[]; + }[]; + + const columnDeletes = [] as { + table: string; + columns: Column[]; + }[]; + + for (let entry of Object.values(res)) { + const { renamed, created, deleted } = await columnsResolver({ + tableName: entry.name, + schema: entry.schema, + deleted: entry.columns.deleted, + created: entry.columns.added, + }); + + if (created.length > 0) { + columnCreates.push({ + table: entry.name, + columns: created, + }); + } + + if (deleted.length > 0) { + columnDeletes.push({ + table: entry.name, + columns: deleted, + }); + } + + if (renamed.length > 0) { + columnRenames.push({ + table: entry.name, + renames: 
renamed, + }); + } + } + + const columnRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[it.table] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const columnsPatchedSnap1 = copy(tablesPatchedSnap1); + columnsPatchedSnap1.tables = mapEntries( + columnsPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapKeys( + tableValue.columns, + (columnKey, column) => { + const rens = columnRenamesDict[tableValue.name] || []; + const newName = columnChangeFor(columnKey, rens); + column.name = newName; + return newName; + }, + ); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + /* const viewsDiff = diffSchemasOrTables(json1.views, json2.views); + + const { + created: createdViews, + deleted: deletedViews, + renamed: renamedViews, // renamed or moved + } = await viewsResolver({ + created: viewsDiff.added, + deleted: viewsDiff.deleted, + }); + + const renamesViewDic: Record = {}; + renamedViews.forEach((it) => { + renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; + }); + + const viewsPatchedSnap1 = copy(columnsPatchedSnap1); + viewsPatchedSnap1.views = mapEntries( + viewsPatchedSnap1.views, + (viewKey, viewValue) => { + const rename = renamesViewDic[viewValue.name]; + + if (rename) { + viewValue.name = rename.to; + viewKey = rename.to; + } + + return [viewKey, viewValue]; + }, + ); + + */ + const diffResult = applyJsonDiff(tablesPatchedSnap1, json2); // replace tablesPatchedSnap1 with viewsPatchedSnap1 + + const typedResult: DiffResultSingleStore = diffResultSchemeSingleStore.parse(diffResult); + + const jsonStatements: JsonStatement[] = []; + + const jsonCreateIndexesForCreatedTables = createdTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.indexes, + curFull.internal, + ); + }) + .flat(); + + const jsonDropTables = deletedTables.map((it) => { + return prepareDropTableJson(it); + }); + + const jsonRenameTables = renamedTables.map((it) => { + return prepareRenameTableJson(it.from, it.to); + }); + + const alteredTables = typedResult.alteredTablesWithColumns; + + const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; + + const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames + .map((it) => prepareRenameColumns(it.table, '', it.renames)) + .flat(); + + const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates + .map((it) => _prepareAddColumns(it.table, '', it.columns)) + .flat(); + + const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes + .map((it) => _prepareDropColumns(it.table, '', it.columns)) + .flat(); + + alteredTables.forEach((it) => { + // This part is needed to make sure that same columns in a table are not triggered for change + // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name + // We double-check that pk with same set of columns are both in added and deleted diffs + let addedColumns: string[] = []; + for (const addedPkName of Object.keys(it.addedCompositePKs)) { + const addedPkColumns = it.addedCompositePKs[addedPkName]; + addedColumns = SingleStoreSquasher.unsquashPK(addedPkColumns).columns; + } + + let deletedColumns: string[] = []; + for (const deletedPkName of 
Object.keys(it.deletedCompositePKs)) { + const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; + deletedColumns = SingleStoreSquasher.unsquashPK(deletedPkColumns).columns; + } + + // Don't need to sort, but need to add tests for it + // addedColumns.sort(); + // deletedColumns.sort(); + const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); + + // add logic for unique constraints + let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + let createdCheckConstraints: JsonCreateCheckConstraint[] = []; + let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraint( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraint( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraint(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), + ); + } + + createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); + deletedCheckConstraints = prepareDeleteCheckConstraint( + it.name, + it.schema, + it.deletedCheckConstraints, + ); + + // skip for push + if (it.alteredCheckConstraints && action !== 'push') { + const added: Record = {}; + const deleted: Record = {}; + + for (const k of Object.keys(it.alteredCheckConstraints)) { + added[k] = it.alteredCheckConstraints[k].__new; + deleted[k] = it.alteredCheckConstraints[k].__old; + } + createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); + deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); + } + + jsonAddedUniqueConstraints.push(...addedUniqueConstraints); + jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); + jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + }); + + const rColumns = jsonRenameColumnsStatements.map((it) => { + const tableName = it.tableName; + const schema = it.schema; + return { + from: { schema, table: tableName, column: it.oldColumnName }, + to: { schema, table: tableName, column: it.newColumnName }, + }; + }); + + const jsonTableAlternations = alteredTables + .map((it) => { + return prepareAlterColumnsMysql( + it.name, + it.schema, + it.altered, + json1, + json2, + action, + ); + }) + .flat(); + + const jsonCreateIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.addedIndexes || {}, + curFull.internal, + ); + }) + .flat(); + + const jsonDropIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareDropIndexesJson( + it.name, + it.schema, + it.deletedIndexes || {}, + ); + }) + .flat(); + + alteredTables.forEach((it) => { + const droppedIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__old; + return current; + }, + {} as Record, + ); + const createdIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + 
current[item] = it.alteredIndexes[item].__new; + return current; + }, + {} as Record, + ); + + jsonCreateIndexesForAllAlteredTables.push( + ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), + ); + jsonDropIndexesForAllAlteredTables.push( + ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), + ); + }); + + const jsonSingleStoreCreateTables = createdTables.map((it) => { + return prepareSingleStoreCreateTableJson( + it, + curFull as SingleStoreSchema, + curFull.internal, + ); + }); + + /* const createViews: JsonCreateSingleStoreViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + const renameViews: JsonRenameViewStatement[] = []; + const alterViews: JsonAlterSingleStoreViewStatement[] = []; + + createViews.push( + ...createdViews.filter((it) => !it.isExisting).map((it) => { + return prepareSingleStoreCreateViewJson( + it.name, + it.definition!, + it.meta, + ); + }), + ); + + dropViews.push( + ...deletedViews.filter((it) => !it.isExisting).map((it) => { + return prepareDropViewJson(it.name); + }), + ); + + renameViews.push( + ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[it.from.name].isExisting).map((it) => { + return prepareRenameViewJson(it.to.name, it.from.name); + }), + ); + + const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); + + for (const alteredView of alteredViews) { + const { definition, meta } = json2.views[alteredView.name]; + + if (alteredView.alteredExisting) { + dropViews.push(prepareDropViewJson(alteredView.name)); + + createViews.push( + prepareSingleStoreCreateViewJson( + alteredView.name, + definition!, + meta, + ), + ); + + continue; + } + + if (alteredView.alteredDefinition && action !== 'push') { + createViews.push( + prepareSingleStoreCreateViewJson( + alteredView.name, + definition!, + meta, + true, + ), + ); + continue; + } + + if (alteredView.alteredMeta) { + const view = curFull['views'][alteredView.name]; + alterViews.push( + prepareSingleStoreAlterView(view), + ); + } + } */ + + jsonStatements.push(...jsonSingleStoreCreateTables); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonRenameColumnsStatements); + + /*jsonStatements.push(...createViews); + jsonStatements.push(...dropViews); + jsonStatements.push(...renameViews); + jsonStatements.push(...alterViews); + */ + jsonStatements.push(...jsonDeletedUniqueConstraints); + + // Will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonDropIndexesForAllAlteredTables); + + jsonStatements.push(...jsonTableAlternations); + jsonStatements.push(...jsonAddedCompositePKs); + + jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonDeletedUniqueConstraints); + + jsonStatements.push(...jsonAddColumnsStatemets); + + jsonStatements.push(...jsonCreateIndexesForCreatedTables); + + jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + + jsonStatements.push(...jsonAddedCompositePKs); + + jsonStatements.push(...jsonAlteredUniqueConstraints); + + const sqlStatements = fromJson(jsonStatements, 'singlestore'); + + const uniqueSqlStatements: string[] = []; + sqlStatements.forEach((ss) => { + if (!uniqueSqlStatements.includes(ss)) { + uniqueSqlStatements.push(ss); + } + }); + + const rTables = renamedTables.map((it) => { + return { from: it.from, to: it.to }; + }); + 
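+ // prepareMigrationMeta records the collected table/column rename pairs in the snapshot _meta returned below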
+ const _meta = prepareMigrationMeta([], rTables, rColumns); + + return { + statements: jsonStatements, + sqlStatements: uniqueSqlStatements, + _meta, + }; +}; + export const applySqliteSnapshotsDiff = async ( json1: SQLiteSchemaSquashed, json2: SQLiteSchemaSquashed, diff --git a/drizzle-kit/src/sqlgenerator.ts b/drizzle-kit/src/sqlgenerator.ts index 3c88a86ce..a35c001fd 100644 --- a/drizzle-kit/src/sqlgenerator.ts +++ b/drizzle-kit/src/sqlgenerator.ts @@ -85,7 +85,9 @@ import { import { Dialect } from './schemaValidator'; import { MySqlSquasher } from './serializer/mysqlSchema'; import { PgSquasher, policy } from './serializer/pgSchema'; +import { SingleStoreSquasher } from './serializer/singlestoreSchema'; import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; + import { escapeSingleQuotes } from './utils'; const parseType = (schemaPrefix: string, type: string) => { @@ -572,6 +574,81 @@ class MySqlCreateTableConvertor extends Convertor { return statement; } } +class SingleStoreCreateTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_table' && dialect === 'singlestore'; + } + + convert(st: JsonCreateTableStatement) { + const { + tableName, + columns, + schema, + compositePKs, + uniqueConstraints, + internals, + } = st; + + let statement = ''; + statement += `CREATE TABLE \`${tableName}\` (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; + const notNullStatement = column.notNull ? ' NOT NULL' : ''; + const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; + + const onUpdateStatement = column.onUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + + const autoincrementStatement = column.autoincrement + ? ' AUTO_INCREMENT' + : ''; + + const generatedStatement = column.generated + ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}` + : ''; + + statement += '\t' + + `\`${column.name}\` ${column.type}${autoincrementStatement}${primaryKeyStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}${generatedStatement}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { + statement += ',\n'; + const compositePK = SingleStoreSquasher.unsquashPK(compositePKs[0]); + statement += `\tCONSTRAINT \`${st.compositePkName}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; + } + + if ( + typeof uniqueConstraints !== 'undefined' + && uniqueConstraints.length > 0 + ) { + for (const uniqueConstraint of uniqueConstraints) { + statement += ',\n'; + const unsquashedUnique = SingleStoreSquasher.unsquashUnique(uniqueConstraint); + + const uniqueString = unsquashedUnique.columns + .map((it) => { + return internals?.indexes + ? internals?.indexes[unsquashedUnique.name]?.columns[it] + ?.isExpression + ? 
it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + + statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`; + } + } + + statement += `\n);`; + statement += `\n`; + return statement; + } +} export class SQLiteCreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { @@ -1165,6 +1242,29 @@ class MySqlAlterTableAddCheckConstraintConvertor extends Convertor { } } +class SingleStoreAlterTableAddUniqueConstraintConvertor extends Convertor { + can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { + return statement.type === 'create_unique_constraint' && dialect === 'singlestore'; + } + convert(statement: JsonCreateUniqueConstraint): string { + const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); + + return `ALTER TABLE \`${statement.tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` UNIQUE(\`${ + unsquashed.columns.join('`,`') + }\`);`; + } +} +class SingleStoreAlterTableDropUniqueConstraintConvertor extends Convertor { + can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { + return statement.type === 'delete_unique_constraint' && dialect === 'singlestore'; + } + convert(statement: JsonDeleteUniqueConstraint): string { + const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); + + return `ALTER TABLE \`${statement.tableName}\` DROP INDEX \`${unsquashed.name}\`;`; + } +} + class MySqlAlterTableDeleteCheckConstraintConvertor extends Convertor { can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean { return ( @@ -1431,6 +1531,17 @@ class MySQLDropTableConvertor extends Convertor { } } +class SingleStoreDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'singlestore'; + } + + convert(statement: JsonDropTableStatement) { + const { tableName } = statement; + return `DROP TABLE \`${tableName}\`;`; + } +} + export class SQLiteDropTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_table' && (dialect === 'sqlite' || dialect === 'turso'); @@ -1479,6 +1590,17 @@ class MySqlRenameTableConvertor extends Convertor { } } +class SingleStoreRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'singlestore'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo } = statement; + return `RENAME TABLE \`${tableNameFrom}\` TO \`${tableNameTo}\`;`; + } +} + class PgAlterTableRenameColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( @@ -1510,6 +1632,19 @@ class MySqlAlterTableRenameColumnConvertor extends Convertor { } } +class SingleStoreAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'singlestore' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName } = statement; + return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`; + } +} + class SQLiteAlterTableRenameColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( @@ -1552,6 +1687,17 @@ class MySqlAlterTableDropColumnConvertor extends Convertor { } } +class 
SingleStoreAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_drop_column' && dialect === 'singlestore'; + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; + } +} + class SQLiteAlterTableDropColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_table_drop_column' && (dialect === 'sqlite' || dialect === 'turso'); @@ -1659,6 +1805,37 @@ class MySqlAlterTableAddColumnConvertor extends Convertor { } } +class SingleStoreAlterTableAddColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_add_column' && dialect === 'singlestore'; + } + + convert(statement: JsonAddColumnStatement) { + const { tableName, column } = statement; + const { + name, + type, + notNull, + primaryKey, + autoincrement, + onUpdate, + generated, + } = column; + + const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; + const autoincrementStatement = `${autoincrement ? ' AUTO_INCREMENT' : ''}`; + const onUpdateStatement = `${onUpdate ? ' ON UPDATE CURRENT_TIMESTAMP' : ''}`; + + const generatedStatement = generated + ? ` GENERATED ALWAYS AS (${generated?.as}) ${generated?.type.toUpperCase()}` + : ''; + + return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${notNullStatement}${onUpdateStatement}${generatedStatement};`; + } +} + export class SQLiteAlterTableAddColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( @@ -2462,76 +2639,429 @@ class MySqlModifyColumn extends Convertor { } } -class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { +class SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'create_composite_pk' && dialect === 'postgresql'; + return ( + statement.type === 'alter_table_alter_column_alter_generated' + && dialect === 'singlestore' + ); } - convert(statement: JsonCreateCompositePK) { - const { name, columns } = PgSquasher.unsquashPK(statement.data); + convert(statement: JsonAlterColumnAlterGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; - const tableNameWithSchema = statement.schema - ? `"${statement.schema}"."${statement.tableName}"` - : `"${statement.tableName}"`; + const tableNameWithSchema = schema + ? 
`\`${schema}\`.\`${tableName}\`` + : `\`${tableName}\``; - return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.constraintName}" PRIMARY KEY("${ - columns.join('","') - }");`; + const addColumnStatement = new SingleStoreAlterTableAddColumnConvertor().convert({ + schema, + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'alter_table_add_column', + }); + + return [ + `ALTER TABLE ${tableNameWithSchema} drop column \`${columnName}\`;`, + addColumnStatement, + ]; } } -class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + +class SingleStoreAlterTableAlterColumnSetDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'delete_composite_pk' && dialect === 'postgresql'; + return ( + statement.type === 'alter_table_alter_column_set_default' + && dialect === 'singlestore' + ); } - convert(statement: JsonDeleteCompositePK) { - const { name, columns } = PgSquasher.unsquashPK(statement.data); - - const tableNameWithSchema = statement.schema - ? `"${statement.schema}"."${statement.tableName}"` - : `"${statement.tableName}"`; - - return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; + convert(statement: JsonAlterColumnSetDefaultStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; } } -class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { +class SingleStoreAlterTableAlterColumnDropDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'alter_composite_pk' && dialect === 'postgresql'; - } - - convert(statement: JsonAlterCompositePK) { - const { name, columns } = PgSquasher.unsquashPK(statement.old); - const { name: newName, columns: newColumns } = PgSquasher.unsquashPK( - statement.new, + return ( + statement.type === 'alter_table_alter_column_drop_default' + && dialect === 'singlestore' ); + } - const tableNameWithSchema = statement.schema - ? 
`"${statement.schema}"."${statement.tableName}"` - : `"${statement.tableName}"`; - - console.log(statement.oldConstraintName, statement.newConstraintName); - return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.oldConstraintName}";\n${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.newConstraintName}" PRIMARY KEY("${ - newColumns.join('","') - }");`; + convert(statement: JsonAlterColumnDropDefaultStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; } } -class MySqlAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'create_composite_pk' && dialect === 'mysql'; +class SingleStoreAlterTableAddPk extends Convertor { + can(statement: JsonStatement, dialect: string): boolean { + return ( + statement.type === 'alter_table_alter_column_set_pk' + && dialect === 'singlestore' + ); } - - convert(statement: JsonCreateCompositePK) { - const { name, columns } = MySqlSquasher.unsquashPK(statement.data); - return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`; + convert(statement: JsonAlterColumnSetPrimaryKeyStatement): string { + return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`; } } -class MySqlAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'delete_composite_pk' && dialect === 'mysql'; - } +class SingleStoreAlterTableDropPk extends Convertor { + can(statement: JsonStatement, dialect: string): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_pk' + && dialect === 'singlestore' + ); + } + convert(statement: JsonAlterColumnDropPrimaryKeyStatement): string { + return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`; + } +} + +type SingleStoreModifyColumnStatement = + | JsonAlterColumnDropNotNullStatement + | JsonAlterColumnSetNotNullStatement + | JsonAlterColumnTypeStatement + | JsonAlterColumnDropOnUpdateStatement + | JsonAlterColumnSetOnUpdateStatement + | JsonAlterColumnDropAutoincrementStatement + | JsonAlterColumnSetAutoincrementStatement + | JsonAlterColumnSetDefaultStatement + | JsonAlterColumnDropDefaultStatement + | JsonAlterColumnSetGeneratedStatement + | JsonAlterColumnDropGeneratedStatement; + +class SingleStoreModifyColumn extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + (statement.type === 'alter_table_alter_column_set_type' + || statement.type === 'alter_table_alter_column_set_notnull' + || statement.type === 'alter_table_alter_column_drop_notnull' + || statement.type === 'alter_table_alter_column_drop_on_update' + || statement.type === 'alter_table_alter_column_set_on_update' + || statement.type === 'alter_table_alter_column_set_autoincrement' + || statement.type === 'alter_table_alter_column_drop_autoincrement' + || statement.type === 'alter_table_alter_column_set_default' + || statement.type === 'alter_table_alter_column_drop_default' + || statement.type === 'alter_table_alter_column_set_generated' + || statement.type === 'alter_table_alter_column_drop_generated') + && dialect === 'singlestore' + ); + } + + convert(statement: SingleStoreModifyColumnStatement) { + const { tableName, columnName } = statement; + let columnType = ``; + let columnDefault: any = ''; + let columnNotNull = ''; + 
let columnOnUpdate = ''; + let columnAutoincrement = ''; + let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; + let columnGenerated = ''; + + if (statement.type === 'alter_table_alter_column_drop_notnull') { + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_notnull') { + columnNotNull = ` NOT NULL`; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_drop_on_update') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnOnUpdate = ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_on_update') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = ` ON UPDATE CURRENT_TIMESTAMP`; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if ( + statement.type === 'alter_table_alter_column_set_autoincrement' + ) { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = ' AUTO_INCREMENT'; + } else if ( + statement.type === 'alter_table_alter_column_drop_autoincrement' + ) { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = ''; + } else if (statement.type === 'alter_table_alter_column_set_default') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = ` DEFAULT ${statement.newDefaultValue}`; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_drop_default') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = ''; + columnAutoincrement = statement.columnAutoIncrement + ? 
' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_generated') { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + + if (statement.columnGenerated?.type === 'virtual') { + return [ + new SingleStoreAlterTableDropColumnConvertor().convert({ + type: 'alter_table_drop_column', + tableName: statement.tableName, + columnName: statement.columnName, + schema: statement.schema, + }), + new SingleStoreAlterTableAddColumnConvertor().convert({ + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: statement.columnNotNull, + default: statement.columnDefault, + onUpdate: statement.columnOnUpdate, + autoincrement: statement.columnAutoIncrement, + primaryKey: statement.columnPk, + generated: statement.columnGenerated, + }, + schema: statement.schema, + type: 'alter_table_add_column', + }), + ]; + } else { + columnGenerated = statement.columnGenerated + ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` + : ''; + } + } else if (statement.type === 'alter_table_alter_column_drop_generated') { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + + if (statement.oldColumn?.generated?.type === 'virtual') { + return [ + new SingleStoreAlterTableDropColumnConvertor().convert({ + type: 'alter_table_drop_column', + tableName: statement.tableName, + columnName: statement.columnName, + schema: statement.schema, + }), + new SingleStoreAlterTableAddColumnConvertor().convert({ + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: statement.columnNotNull, + default: statement.columnDefault, + onUpdate: statement.columnOnUpdate, + autoincrement: statement.columnAutoIncrement, + primaryKey: statement.columnPk, + generated: statement.columnGenerated, + }, + schema: statement.schema, + type: 'alter_table_add_column', + }), + ]; + } + } else { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + columnGenerated = statement.columnGenerated + ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` + : ''; + } + + // Seems like getting value from simple json2 shanpshot makes dates be dates + columnDefault = columnDefault instanceof Date + ? 
columnDefault.toISOString() + : columnDefault; + + return `ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${columnName}\`${columnType}${columnAutoincrement}${columnNotNull}${columnDefault}${columnOnUpdate}${columnGenerated};`; + } +} +class SqliteAlterTableAlterColumnDropDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_default' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnDropDefaultStatement) { + return ( + '/*\n SQLite does not support "Drop default from column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateCompositePK) { + const { name, columns } = PgSquasher.unsquashPK(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.constraintName}" PRIMARY KEY("${ + columns.join('","') + }");`; + } +} +class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonDeleteCompositePK) { + const { name, columns } = PgSquasher.unsquashPK(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; + } +} + +class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonAlterCompositePK) { + const { name, columns } = PgSquasher.unsquashPK(statement.old); + const { name: newName, columns: newColumns } = PgSquasher.unsquashPK( + statement.new, + ); + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.oldConstraintName}";\n${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.newConstraintName}" PRIMARY KEY("${ + newColumns.join('","') + }");`; + } +} + +class MySqlAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'mysql'; + } + + convert(statement: JsonCreateCompositePK) { + const { name, columns } = MySqlSquasher.unsquashPK(statement.data); + return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`; + } +} + +class MySqlAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'mysql'; + } convert(statement: JsonDeleteCompositePK) { const { name, columns } = MySqlSquasher.unsquashPK(statement.data); @@ -2553,6 +3083,89 @@ class MySqlAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { } } +class SqliteAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'sqlite'; + } + + convert(statement: JsonCreateCompositePK) { + let msg = '/*\n'; + msg += `You're trying to add PRIMARY KEY(${statement.data}) to '${statement.tableName}' table\n`; + msg += 'SQLite does not support adding primary key to an already created table\n'; + msg += 'You can do it in 3 steps with drizzle orm:\n'; + msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; + msg += ' - migrate old data from one table to another\n'; + msg += ' - delete old_table in schema, generate sql\n\n'; + msg += 'or create manual migration like below:\n\n'; + msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; + msg += 'CREATE TABLE table_name (\n'; + msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; + msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; + msg += '\t...\n'; + msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... 
pk_col_n)\n'; + msg += ' );\n'; + msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; + msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; + msg += '*/\n'; + return msg; + } +} +class SqliteAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'sqlite'; + } + + convert(statement: JsonDeleteCompositePK) { + let msg = '/*\n'; + msg += `You're trying to delete PRIMARY KEY(${statement.data}) from '${statement.tableName}' table\n`; + msg += 'SQLite does not support primary key deletion from existing table\n'; + msg += 'You can do it in 3 steps with drizzle orm:\n'; + msg += ' - create new mirror table without pk, rename current table to old_table, generate SQL\n'; + msg += ' - migrate old data from one table to another\n'; + msg += ' - delete old_table in schema, generate sql\n\n'; + msg += 'or create manual migration like below:\n\n'; + msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; + msg += 'CREATE TABLE table_name (\n'; + msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; + msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; + msg += '\t...\n'; + msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; + msg += ' );\n'; + msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; + msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; + msg += '*/\n'; + return msg; + } +} + +class SqliteAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_composite_pk' && dialect === 'sqlite'; + } + + convert(statement: JsonAlterCompositePK) { + let msg = '/*\n'; + msg += 'SQLite does not support altering primary key\n'; + msg += 'You can do it in 3 steps with drizzle orm:\n'; + msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; + msg += ' - migrate old data from one table to another\n'; + msg += ' - delete old_table in schema, generate sql\n\n'; + msg += 'or create manual migration like below:\n\n'; + msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; + msg += 'CREATE TABLE table_name (\n'; + msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; + msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; + msg += '\t...\n'; + msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; + msg += ' );\n'; + msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; + msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; + msg += '*/\n'; + + return msg; + } +} + class PgAlterTableAlterColumnSetPrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( @@ -2895,6 +3508,32 @@ class CreateMySqlIndexConvertor extends Convertor { } } +class CreateSingleStoreIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_index' && dialect === 'singlestore'; + } + + convert(statement: JsonCreateIndexStatement): string { + // should be changed + const { name, columns, isUnique } = SingleStoreSquasher.unsquashIdx( + statement.data, + ); + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + + const uniqueString = columns + .map((it) => { + return statement.internal?.indexes + ? statement.internal?.indexes[name]?.columns[it]?.isExpression + ?
it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + + return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; + } +} + export class CreateSqliteIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_index' && (dialect === 'sqlite' || dialect === 'turso'); @@ -3039,6 +3678,17 @@ class MySqlDropIndexConvertor extends Convertor { } } +class SingleStoreDropIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_index' && dialect === 'singlestore'; + } + + convert(statement: JsonDropIndexStatement): string { + const { name } = SingleStoreSquasher.unsquashIdx(statement.data); + return `DROP INDEX \`${name}\` ON \`${statement.tableName}\`;`; + } +} + class SQLiteRecreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( @@ -3174,6 +3824,7 @@ class LibSQLRecreateTableConvertor extends Convertor { const convertors: Convertor[] = []; convertors.push(new PgCreateTableConvertor()); convertors.push(new MySqlCreateTableConvertor()); +convertors.push(new SingleStoreCreateTableConvertor()); convertors.push(new SQLiteCreateTableConvertor()); convertors.push(new SQLiteRecreateTableConvertor()); convertors.push(new LibSQLRecreateTableConvertor()); @@ -3210,22 +3861,27 @@ convertors.push(new AlterPgSequenceConvertor()); convertors.push(new PgDropTableConvertor()); convertors.push(new MySQLDropTableConvertor()); +convertors.push(new SingleStoreDropTableConvertor()); convertors.push(new SQLiteDropTableConvertor()); convertors.push(new PgRenameTableConvertor()); convertors.push(new MySqlRenameTableConvertor()); +convertors.push(new SingleStoreRenameTableConvertor()); convertors.push(new SqliteRenameTableConvertor()); convertors.push(new PgAlterTableRenameColumnConvertor()); convertors.push(new MySqlAlterTableRenameColumnConvertor()); +convertors.push(new SingleStoreAlterTableRenameColumnConvertor()); convertors.push(new SQLiteAlterTableRenameColumnConvertor()); convertors.push(new PgAlterTableDropColumnConvertor()); convertors.push(new MySqlAlterTableDropColumnConvertor()); +convertors.push(new SingleStoreAlterTableDropColumnConvertor()); convertors.push(new SQLiteAlterTableDropColumnConvertor()); convertors.push(new PgAlterTableAddColumnConvertor()); convertors.push(new MySqlAlterTableAddColumnConvertor()); +convertors.push(new SingleStoreAlterTableAddColumnConvertor()); convertors.push(new SQLiteAlterTableAddColumnConvertor()); convertors.push(new PgAlterTableAlterColumnSetTypeConvertor()); @@ -3241,13 +3897,18 @@ convertors.push(new MySqlAlterTableDeleteCheckConstraintConvertor()); convertors.push(new MySQLAlterTableAddUniqueConstraintConvertor()); convertors.push(new MySQLAlterTableDropUniqueConstraintConvertor()); +convertors.push(new SingleStoreAlterTableAddUniqueConstraintConvertor()); +convertors.push(new SingleStoreAlterTableDropUniqueConstraintConvertor()); + convertors.push(new CreatePgIndexConvertor()); convertors.push(new CreateMySqlIndexConvertor()); +convertors.push(new CreateSingleStoreIndexConvertor()); convertors.push(new CreateSqliteIndexConvertor()); convertors.push(new PgDropIndexConvertor()); convertors.push(new SqliteDropIndexConvertor()); convertors.push(new MySqlDropIndexConvertor()); +convertors.push(new SingleStoreDropIndexConvertor()); convertors.push(new PgAlterTableAlterColumnSetPrimaryKeyConvertor()); convertors.push(new 
PgAlterTableAlterColumnDropPrimaryKeyConvertor()); @@ -3281,6 +3942,8 @@ convertors.push(new PgAlterTableAlterColumnAlterrGeneratedConvertor()); convertors.push(new MySqlAlterTableAlterColumnAlterrGeneratedConvertor()); +convertors.push(new SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor()); + convertors.push(new SqliteAlterTableAlterColumnDropGeneratedConvertor()); convertors.push(new SqliteAlterTableAlterColumnAlterGeneratedConvertor()); convertors.push(new SqliteAlterTableAlterColumnSetExpressionConvertor()); @@ -3290,6 +3953,8 @@ convertors.push(new LibSQLModifyColumn()); // convertors.push(new MySqlAlterTableAlterColumnSetDefaultConvertor()); // convertors.push(new MySqlAlterTableAlterColumnDropDefaultConvertor()); +convertors.push(new SingleStoreModifyColumn()); + convertors.push(new PgCreateForeignKeyConvertor()); convertors.push(new MySqlCreateForeignKeyConvertor()); @@ -3321,6 +3986,9 @@ convertors.push(new MySqlAlterTableCreateCompositePrimaryKeyConvertor()); convertors.push(new MySqlAlterTableAddPk()); convertors.push(new MySqlAlterTableAlterCompositePrimaryKeyConvertor()); +convertors.push(new SingleStoreAlterTableDropPk()); +convertors.push(new SingleStoreAlterTableAddPk()); + export function fromJson( statements: JsonStatement[], dialect: Dialect, diff --git a/drizzle-kit/src/utils.ts b/drizzle-kit/src/utils.ts index 685e2efb5..2638ca4ef 100644 --- a/drizzle-kit/src/utils.ts +++ b/drizzle-kit/src/utils.ts @@ -10,6 +10,7 @@ import { assertUnreachable, snapshotVersion } from './global'; import type { Dialect } from './schemaValidator'; import { backwardCompatibleMysqlSchema } from './serializer/mysqlSchema'; import { backwardCompatiblePgSchema } from './serializer/pgSchema'; +import { backwardCompatibleSingleStoreSchema } from './serializer/singlestoreSchema'; import { backwardCompatibleSqliteSchema } from './serializer/sqliteSchema'; import type { ProxyParams } from './serializer/studio'; @@ -123,6 +124,8 @@ const validatorForDialect = (dialect: Dialect) => { return { validator: backwardCompatibleSqliteSchema, version: 6 }; case 'mysql': return { validator: backwardCompatibleMysqlSchema, version: 5 }; + case 'singlestore': + return { validator: backwardCompatibleSingleStoreSchema, version: 1 }; } }; diff --git a/drizzle-kit/tests/introspect/singlestore.test.ts b/drizzle-kit/tests/introspect/singlestore.test.ts new file mode 100644 index 000000000..71960c3f7 --- /dev/null +++ b/drizzle-kit/tests/introspect/singlestore.test.ts @@ -0,0 +1,275 @@ +import Docker from 'dockerode'; +import 'dotenv/config'; +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + char, + decimal, + double, + float, + int, + mediumint, + singlestoreTable, + smallint, + text, + tinyint, + varchar, +} from 'drizzle-orm/singlestore-core'; +import * as fs from 'fs'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { introspectSingleStoreToFile } from 'tests/schemaDiffer'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; + +let client: Connection; +let singlestoreContainer: Docker.Container; + +async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => err ? 
reject(err) : resolve(err)) + ); + + singlestoreContainer = await docker.createContainer({ + Image: image, + Env: ['ROOT_PASSWORD=singlestore'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await singlestoreContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return `singlestore://root:singlestore@localhost:${port}/`; +} + +beforeAll(async () => { + const connectionString = process.env.SINGLESTORE_CONNECTION_STRING ?? await createDockerDB(); + + const sleep = 1000; + let timeLeft = 20000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + client = await createConnection(connectionString); + await client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.error('Cannot connect to SingleStore'); + await client?.end().catch(console.error); + await singlestoreContainer?.stop().catch(console.error); + throw lastError; + } +}); + +afterAll(async () => { + await client?.end().catch(console.error); + await singlestoreContainer?.stop().catch(console.error); +}); + +beforeEach(async () => { + await client.query(`drop database if exists \`drizzle\`;`); + await client.query(`create database \`drizzle\`;`); + await client.query(`use \`drizzle\`;`); +}); + +if (!fs.existsSync('tests/introspect/singlestore')) { + fs.mkdirSync('tests/introspect/singlestore'); +} + +// TODO: Unskip this test when generated column is implemented +/* test.skip('generated always column: link to another column', async () => { + const schema = { + users: singlestoreTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + ), + }), + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + schema, + 'generated-link-column', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); */ + +// TODO: Unskip this test when generated column is implemented +/* test.skip('generated always column virtual: link to another column', async () => { + const schema = { + users: singlestoreTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + schema, + 'generated-link-column-virtual', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); */ + +test('Default value of character type column: char', async () => { + const schema = { + users: singlestoreTable('users', { + id: int('id'), + sortKey: char('sortKey', { length: 255 }).default('0'), + }), + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + schema, + 'default-value-char-column', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('Default value of character type column: varchar', async () => { + const schema = { + users: singlestoreTable('users', { + id: int('id'), + sortKey: varchar('sortKey', { length: 255 }).default('0'), + }), + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + 
schema, + 'default-value-varchar-column', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +// TODO: Unskip this test when views are implemented +/* test('view #1', async () => { + const users = singlestoreTable('users', { id: int('id') }); + const testView = singlestoreView('some_view', { id: int('id') }).as( + sql`select \`drizzle\`.\`users\`.\`id\` AS \`id\` from \`drizzle\`.\`users\``, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + schema, + 'view-1', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); */ + +// TODO: Unskip this test when views are implemented +/* test('view #2', async () => { + const users = singlestoreTable('some_users', { id: int('id') }); + const testView = singlestoreView('some_view', { id: int('id') }).algorithm('temptable').sqlSecurity('definer').as( + sql`SELECT * FROM ${users}`, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + schema, + 'view-2', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); */ + +test('handle float type', async () => { + const schema = { + table: singlestoreTable('table', { + col1: float(), + col2: float({ precision: 2 }), + col3: float({ precision: 2, scale: 1 }), + }), + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + schema, + 'handle-float-type', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('handle unsigned numerical types', async () => { + const schema = { + table: singlestoreTable('table', { + col1: int({ unsigned: true }), + col2: tinyint({ unsigned: true }), + col3: smallint({ unsigned: true }), + col4: mediumint({ unsigned: true }), + col5: bigint({ mode: 'number', unsigned: true }), + col6: float({ unsigned: true }), + col7: float({ precision: 2, scale: 1, unsigned: true }), + col8: double({ unsigned: true }), + col9: double({ precision: 2, scale: 1, unsigned: true }), + col10: decimal({ unsigned: true }), + col11: decimal({ precision: 2, scale: 1, unsigned: true }), + }), + }; + + const { statements, sqlStatements } = await introspectSingleStoreToFile( + client, + schema, + 'handle-unsigned-numerical-types', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/push/singlestore-push.test.ts b/drizzle-kit/tests/push/singlestore-push.test.ts new file mode 100644 index 000000000..4ad3c6c0e --- /dev/null +++ b/drizzle-kit/tests/push/singlestore-push.test.ts @@ -0,0 +1,266 @@ +import Docker from 'dockerode'; +import { int, singlestoreTable } from 'drizzle-orm/singlestore-core'; +import fs from 'fs'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { diffTestSchemasPushSingleStore } from 'tests/schemaDiffer'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, expect, test } from 'vitest'; + +let client: Connection; +let singlestoreContainer: Docker.Container; + +async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, 
reject) => + docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) + ); + + singlestoreContainer = await docker.createContainer({ + Image: image, + Env: ['ROOT_PASSWORD=singlestore'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await singlestoreContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return `singlestore://root:singlestore@localhost:${port}/`; +} + +beforeAll(async () => { + const connectionString = process.env.MYSQL_CONNECTION_STRING ?? (await createDockerDB()); + + const sleep = 1000; + let timeLeft = 20000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + client = await createConnection(connectionString); + await client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.error('Cannot connect to MySQL'); + await client?.end().catch(console.error); + await singlestoreContainer?.stop().catch(console.error); + throw lastError; + } + + await client.query('DROP DATABASE IF EXISTS drizzle;'); + await client.query('CREATE DATABASE drizzle;'); + await client.query('USE drizzle;'); +}); + +afterAll(async () => { + await client?.end().catch(console.error); + await singlestoreContainer?.stop().catch(console.error); +}); + +if (!fs.existsSync('tests/push/singlestore')) { + fs.mkdirSync('tests/push/singlestore'); +} + +test('db has checks. Push with same names', async () => { + const schema1 = { + test: singlestoreTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }), + }; + const schema2 = { + test: singlestoreTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); + + await client.query(`DROP TABLE \`test\`;`); +}); + +// TODO: Unskip this test when views are implemented +/* test.skip.skip('create view', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: singlestoreView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + definition: 'select `id` from `test`', + name: 'view', + type: 'singlestore_create_view', + replace: false, + sqlSecurity: 'definer', + withCheckOption: undefined, + algorithm: 'undefined', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `CREATE ALGORITHM = undefined +SQL SECURITY definer +VIEW \`view\` AS (select \`id\` from \`test\`);`, + ]); + + await client.query(`DROP TABLE \`test\`;`); +}); */ + +// TODO: Unskip this test when views are implemented +/* test.skip('drop view', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: singlestoreView('view').as((qb) => qb.select().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + 
client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + name: 'view', + type: 'drop_view', + }, + ]); + expect(sqlStatements).toStrictEqual(['DROP VIEW `view`;']); + await client.query(`DROP TABLE \`test\`;`); + await client.query(`DROP VIEW \`view\`;`); +}); */ + +// TODO: Unskip this test when views are implemented +/* test.skip('alter view ".as"', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: singlestoreView('view').as((qb) => + qb + .select() + .from(table) + .where(sql`${table.id} = 1`) + ), + }; + + const schema2 = { + test: table, + view: singlestoreView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`DROP TABLE \`test\`;`); + await client.query(`DROP VIEW \`view\`;`); +}); */ + +// TODO: Unskip this test when views are implemented +/* test.skip('alter meta options with distinct in definition', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: singlestoreView('view') + .withCheckOption('cascaded') + .sqlSecurity('definer') + .algorithm('merge') + .as((qb) => + qb + .selectDistinct() + .from(table) + .where(sql`${table.id} = 1`) + ), + }; + + const schema2 = { + test: table, + view: singlestoreView('view') + .withCheckOption('cascaded') + .sqlSecurity('definer') + .algorithm('undefined') + .as((qb) => qb.selectDistinct().from(table)), + }; + + await expect( + diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ), + ).rejects.toThrowError(); + + await client.query(`DROP TABLE \`test\`;`); +}); */ diff --git a/drizzle-kit/tests/push/singlestore.test.ts b/drizzle-kit/tests/push/singlestore.test.ts new file mode 100644 index 000000000..82c72063c --- /dev/null +++ b/drizzle-kit/tests/push/singlestore.test.ts @@ -0,0 +1,439 @@ +import Docker from 'dockerode'; +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + binary, + char, + date, + datetime, + decimal, + double, + float, + int, + json, + mediumint, + primaryKey, + serial, + singlestoreEnum, + singlestoreTable, + smallint, + text, + time, + timestamp, + tinyint, + varbinary, + varchar, + year, +} from 'drizzle-orm/singlestore-core'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { diffTestSchemasPushSingleStore, diffTestSchemasSingleStore } from 'tests/schemaDiffer'; +import { v4 as uuid } from 'uuid'; +import { expect } from 'vitest'; +import { DialectSuite, run } from './common'; + +async function createDockerDB(context: any): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => err ? 
reject(err) : resolve(err)) + ); + + context.singlestoreContainer = await docker.createContainer({ + Image: image, + Env: ['ROOT_PASSWORD=singlestore'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await context.singlestoreContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return `singlestore://root:singlestore@localhost:${port}/`; +} + +const singlestoreSuite: DialectSuite = { + allTypes: async function(context: any): Promise { + const schema1 = { + allBigInts: singlestoreTable('all_big_ints', { + simple: bigint('simple', { mode: 'number' }), + columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), + columnDefault: bigint('column_default', { mode: 'number' }).default(12), + columnDefaultSql: bigint('column_default_sql', { + mode: 'number', + }).default(12), + }), + allBools: singlestoreTable('all_bools', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(1), + }), + allChars: singlestoreTable('all_chars', { + simple: char('simple', { length: 1 }), + columnNotNull: char('column_not_null', { length: 45 }).notNull(), + // columnDefault: char("column_default", { length: 1 }).default("h"), + columnDefaultSql: char('column_default_sql', { length: 1 }).default( + 'h', + ), + }), + // allDateTimes: singlestoreTable("all_date_times", { + // simple: datetime("simple", { mode: "string", fsp: 1 }), + // columnNotNull: datetime("column_not_null", { + // mode: "string", + // }).notNull(), + // columnDefault: datetime("column_default", { mode: "string" }).default( + // "2023-03-01 14:05:29" + // ), + // }), + allDates: singlestoreTable('all_dates', { + simple: date('simple', { mode: 'string' }), + column_not_null: date('column_not_null', { mode: 'string' }).notNull(), + column_default: date('column_default', { mode: 'string' }).default( + '2023-03-01', + ), + }), + allDecimals: singlestoreTable('all_decimals', { + simple: decimal('simple', { precision: 1, scale: 0 }), + columnNotNull: decimal('column_not_null', { + precision: 45, + scale: 3, + }).notNull(), + columnDefault: decimal('column_default', { + precision: 10, + scale: 0, + }).default('100'), + columnDefaultSql: decimal('column_default_sql', { + precision: 10, + scale: 0, + }).default('101'), + }), + + allDoubles: singlestoreTable('all_doubles', { + simple: double('simple'), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allEnums: singlestoreTable('all_enums', { + simple: singlestoreEnum('simple', ['hi', 'hello']), + }), + + allEnums1: singlestoreTable('all_enums1', { + simple: singlestoreEnum('simple', ['hi', 'hello']).default('hi'), + }), + + allFloats: singlestoreTable('all_floats', { + columnNotNull: float('column_not_null').notNull(), + columnDefault: float('column_default').default(100), + columnDefaultSql: float('column_default_sql').default(101), + }), + + allInts: singlestoreTable('all_ints', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + allIntsRef: singlestoreTable('all_ints_ref', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: 
int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + // allJsons: singlestoreTable("all_jsons", { + // columnDefaultObject: json("column_default_object") + // .default({ hello: "world world" }) + // .notNull(), + // columnDefaultArray: json("column_default_array").default({ + // hello: { "world world": ["foo", "bar"] }, + // foo: "bar", + // fe: 23, + // }), + // column: json("column"), + // }), + + allMInts: singlestoreTable('all_m_ints', { + simple: mediumint('simple'), + columnNotNull: mediumint('column_not_null').notNull(), + columnDefault: mediumint('column_default').default(100), + columnDefaultSql: mediumint('column_default_sql').default(101), + }), + + allReals: singlestoreTable('all_reals', { + simple: double('simple', { precision: 5, scale: 2 }), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allSInts: singlestoreTable('all_s_ints', { + simple: smallint('simple'), + columnNotNull: smallint('column_not_null').notNull(), + columnDefault: smallint('column_default').default(100), + columnDefaultSql: smallint('column_default_sql').default(101), + }), + + // allSmallSerials: singlestoreTable("all_small_serials", { + // columnAll: serial("column_all").notNull(), + // }), + + allTInts: singlestoreTable('all_t_ints', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(10), + columnDefaultSql: tinyint('column_default_sql').default(11), + }), + + allTexts: singlestoreTable('all_texts', { + simple: text('simple'), + columnNotNull: text('column_not_null').notNull(), + columnDefault: text('column_default').default('hello'), + columnDefaultSql: text('column_default_sql').default('hello'), + }), + + allTimes: singlestoreTable('all_times', { + // simple: time("simple", { fsp: 1 }), + columnNotNull: time('column_not_null').notNull(), + columnDefault: time('column_default').default('22:12:12'), + }), + + allTimestamps: singlestoreTable('all_timestamps', { + // columnDateNow: timestamp("column_date_now", { + // fsp: 1, + // mode: "string", + // }).default(sql`(now())`), + columnAll: timestamp('column_all', { mode: 'string' }) + .default('2023-03-01 14:05:29') + .notNull(), + column: timestamp('column', { mode: 'string' }).default( + '2023-02-28 16:18:31', + ), + }), + + allVarChars: singlestoreTable('all_var_chars', { + simple: varchar('simple', { length: 100 }), + columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), + columnDefault: varchar('column_default', { length: 100 }).default( + 'hello', + ), + columnDefaultSql: varchar('column_default_sql', { + length: 100, + }).default('hello'), + }), + + allVarbinaries: singlestoreTable('all_varbinaries', { + simple: varbinary('simple', { length: 100 }), + columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), + columnDefault: varbinary('column_default', { length: 12 }), + }), + + allYears: singlestoreTable('all_years', { + simple: year('simple'), + columnNotNull: year('column_not_null').notNull(), + columnDefault: year('column_default').default(2022), + }), + + binafry: singlestoreTable('binary', { + simple: binary('simple', { length: 1 }), + columnNotNull: binary('column_not_null', { length: 1 }).notNull(), + columnDefault: binary('column_default', { length: 12 }), + }), + }; + + const { statements } = await diffTestSchemasPushSingleStore( + 
context.client as Connection, + schema1, + schema1, + [], + 'drizzle', + false, + ); + console.log(statements); + expect(statements.length).toBe(0); + expect(statements).toEqual([]); + + const { sqlStatements: dropStatements } = await diffTestSchemasSingleStore( + schema1, + {}, + [], + false, + ); + + for (const st of dropStatements) { + await context.client.query(st); + } + }, + addBasicIndexes: function(context?: any): Promise { + return {} as any; + }, + changeIndexFields: function(context?: any): Promise { + return {} as any; + }, + dropIndex: function(context?: any): Promise { + return {} as any; + }, + indexesToBeNotTriggered: function(context?: any): Promise { + return {} as any; + }, + indexesTestCase1: function(context?: any): Promise { + return {} as any; + }, + async case1() { + // TODO: implement if needed + expect(true).toBe(true); + }, + addNotNull: function(context?: any): Promise { + return {} as any; + }, + addNotNullWithDataNoRollback: function(context?: any): Promise { + return {} as any; + }, + addBasicSequences: function(context?: any): Promise { + return {} as any; + }, + addGeneratedColumn: async function(context: any): Promise { + return {} as any; + }, + addGeneratedToColumn: async function(context: any): Promise { + return {} as any; + }, + dropGeneratedConstraint: async function(context: any): Promise { + return {} as any; + }, + alterGeneratedConstraint: async function(context: any): Promise { + return {} as any; + }, + createTableWithGeneratedConstraint: function(context?: any): Promise { + return {} as any; + }, + createCompositePrimaryKey: async function(context: any): Promise { + const schema1 = {}; + + const schema2 = { + table: singlestoreTable('table', { + col1: int('col1').notNull(), + col2: int('col2').notNull(), + }, (t) => ({ + pk: primaryKey({ + columns: [t.col1, t.col2], + }), + })), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + context.client as Connection, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + type: 'create_table', + tableName: 'table', + schema: undefined, + internals: { + indexes: {}, + tables: {}, + }, + compositePKs: ['table_col1_col2_pk;col1,col2'], + compositePkName: 'table_col1_col2_pk', + uniqueConstraints: [], + columns: [ + { name: 'col1', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, + { name: 'col2', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, + ], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `table_col1_col2_pk` PRIMARY KEY(`col1`,`col2`)\n);\n', + ]); + }, + renameTableWithCompositePrimaryKey: async function(context?: any): Promise { + const productsCategoriesTable = (tableName: string) => { + return singlestoreTable(tableName, { + productId: varchar('product_id', { length: 10 }).notNull(), + categoryId: varchar('category_id', { length: 10 }).notNull(), + }, (t) => ({ + pk: primaryKey({ + columns: [t.productId, t.categoryId], + }), + })); + }; + + const schema1 = { + table: productsCategoriesTable('products_categories'), + }; + const schema2 = { + test: productsCategoriesTable('products_to_categories'), + }; + + const { sqlStatements } = await diffTestSchemasPushSingleStore( + context.client as Connection, + schema1, + schema2, + ['public.products_categories->public.products_to_categories'], + 'drizzle', + false, + ); + + // It's not possible to create/alter/drop primary keys in 
SingleStore + expect(sqlStatements).toStrictEqual([ + 'RENAME TABLE `products_categories` TO `products_to_categories`;', + ]); + + await context.client.query(`DROP TABLE \`products_categories\``); + }, +}; + +run( + singlestoreSuite, + async (context: any) => { + const connectionString = process.env.SINGLESTORE_CONNECTION_STRING + ?? (await createDockerDB(context)); + + const sleep = 1000; + let timeLeft = 20000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + context.client = await createConnection(connectionString); + await context.client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.error('Cannot connect to SingleStore'); + await context.client?.end().catch(console.error); + await context.singlestoreContainer?.stop().catch(console.error); + throw lastError; + } + + await context.client.query(`DROP DATABASE IF EXISTS \`drizzle\`;`); + await context.client.query('CREATE DATABASE drizzle;'); + await context.client.query('USE drizzle;'); + }, + async (context: any) => { + await context.client?.end().catch(console.error); + await context.singlestoreContainer?.stop().catch(console.error); + }, +); diff --git a/drizzle-kit/tests/schemaDiffer.ts b/drizzle-kit/tests/schemaDiffer.ts index adc7aecbf..9c7f212aa 100644 --- a/drizzle-kit/tests/schemaDiffer.ts +++ b/drizzle-kit/tests/schemaDiffer.ts @@ -18,6 +18,7 @@ import { PgTable, PgView, } from 'drizzle-orm/pg-core'; +import { SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core'; import { SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; import * as fs from 'fs'; import { Connection } from 'mysql2/promise'; @@ -42,22 +43,28 @@ import { Entities } from 'src/cli/validations/cli'; import { CasingType } from 'src/cli/validations/common'; import { schemaToTypeScript as schemaToTypeScriptMySQL } from 'src/introspect-mysql'; import { schemaToTypeScript } from 'src/introspect-pg'; +import { schemaToTypeScript as schemaToTypeScriptSingleStore } from 'src/introspect-singlestore'; import { schemaToTypeScript as schemaToTypeScriptSQLite } from 'src/introspect-sqlite'; import { prepareFromMySqlImports } from 'src/serializer/mysqlImports'; import { mysqlSchema, squashMysqlScheme, ViewSquashed } from 'src/serializer/mysqlSchema'; -import { generateMySqlSnapshot } from 'src/serializer/mysqlSerializer'; -import { fromDatabase as fromMySqlDatabase } from 'src/serializer/mysqlSerializer'; +import { fromDatabase as fromMySqlDatabase, generateMySqlSnapshot } from 'src/serializer/mysqlSerializer'; import { prepareFromPgImports } from 'src/serializer/pgImports'; -import { pgSchema, PgSquasher, Policy, Role, squashPgScheme, View } from 'src/serializer/pgSchema'; +import { pgSchema, Policy, Role, squashPgScheme, View } from 'src/serializer/pgSchema'; import { fromDatabase, generatePgSnapshot } from 'src/serializer/pgSerializer'; +import { prepareFromSingleStoreImports } from 'src/serializer/singlestoreImports'; +import { singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; +import { + fromDatabase as fromSingleStoreDatabase, + generateSingleStoreSnapshot, +} from 'src/serializer/singlestoreSerializer'; import { prepareFromSqliteImports } from 'src/serializer/sqliteImports'; import { sqliteSchema, squashSqliteScheme, View as SqliteView } from 'src/serializer/sqliteSchema'; -import { fromDatabase as 
fromSqliteDatabase } from 'src/serializer/sqliteSerializer'; -import { generateSqliteSnapshot } from 'src/serializer/sqliteSerializer'; +import { fromDatabase as fromSqliteDatabase, generateSqliteSnapshot } from 'src/serializer/sqliteSerializer'; import { applyLibSQLSnapshotsDiff, applyMysqlSnapshotsDiff, applyPgSnapshotsDiff, + applySingleStoreSnapshotsDiff, applySqliteSnapshotsDiff, Column, ColumnsResolverInput, @@ -78,15 +85,33 @@ import { export type PostgresSchema = Record< string, - PgTable | PgEnum | PgSchema | PgSequence | PgView | PgMaterializedView | PgRole | PgPolicy + | PgTable + | PgEnum + | PgSchema + | PgSequence + | PgView + | PgMaterializedView + | PgRole + | PgPolicy +>; +export type MysqlSchema = Record< + string, + MySqlTable | MySqlSchema | MySqlView >; -export type MysqlSchema = Record | MySqlSchema | MySqlView>; export type SqliteSchema = Record | SQLiteView>; +export type SinglestoreSchema = Record< + string, + SingleStoreTable | SingleStoreSchema /* | SingleStoreView */ +>; export const testSchemasResolver = (renames: Set) => async (input: ResolverInput): Promise> => { try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { return { created: input.created, renamed: [], @@ -138,273 +163,297 @@ export const testSchemasResolver = } }; -export const testSequencesResolver = - (renames: Set) => async (input: ResolverInput): Promise> => { - try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } +export const testSequencesResolver = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } - let createdSequences = [...input.created]; - let deletedSequences = [...input.deleted]; + let createdSequences = [...input.created]; + let deletedSequences = [...input.deleted]; - const result: { - created: Sequence[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Sequence; to: Sequence }[]; - deleted: Sequence[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + const result: { + created: Sequence[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Sequence; to: Sequence }[]; + deleted: Sequence[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - for (let rename of renames) { - const [from, to] = rename.split('->'); + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedSequences.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); - const idxFrom = deletedSequences.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; + if (idxFrom >= 0) { + const idxTo = createdSequences.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - if (idxFrom >= 0) { - const idxTo = createdSequences.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; + const tableFrom = deletedSequences[idxFrom]; + const tableTo = createdSequences[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: 
tableTo.schema, }); + } - const tableFrom = deletedSequences[idxFrom]; - const tableTo = createdSequences[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, - }); - } - - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedSequences[idxFrom], - to: createdSequences[idxTo], - }); - } - - delete createdSequences[idxTo]; - delete deletedSequences[idxFrom]; - - createdSequences = createdSequences.filter(Boolean); - deletedSequences = deletedSequences.filter(Boolean); + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedSequences[idxFrom], + to: createdSequences[idxTo], + }); } + + delete createdSequences[idxTo]; + delete deletedSequences[idxFrom]; + + createdSequences = createdSequences.filter(Boolean); + deletedSequences = deletedSequences.filter(Boolean); } + } - result.created = createdSequences; - result.deleted = deletedSequences; + result.created = createdSequences; + result.deleted = deletedSequences; - return result; - } catch (e) { - console.error(e); - throw e; + return result; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const testEnumsResolver = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; } - }; -export const testEnumsResolver = - (renames: Set) => async (input: ResolverInput): Promise> => { - try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } + let createdEnums = [...input.created]; + let deletedEnums = [...input.deleted]; - let createdEnums = [...input.created]; - let deletedEnums = [...input.deleted]; + const result: { + created: Enum[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Enum; to: Enum }[]; + deleted: Enum[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - const result: { - created: Enum[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Enum; to: Enum }[]; - deleted: Enum[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + for (let rename of renames) { + const [from, to] = rename.split('->'); - for (let rename of renames) { - const [from, to] = rename.split('->'); + const idxFrom = deletedEnums.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); - const idxFrom = deletedEnums.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; + if (idxFrom >= 0) { + const idxTo = createdEnums.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - if (idxFrom >= 0) { - const idxTo = createdEnums.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; + const tableFrom = deletedEnums[idxFrom]; + const tableTo = createdEnums[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, }); + } - const tableFrom = deletedEnums[idxFrom]; - const tableTo = createdEnums[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, - }); - } - - 
if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedEnums[idxFrom], - to: createdEnums[idxTo], - }); - } - - delete createdEnums[idxTo]; - delete deletedEnums[idxFrom]; - - createdEnums = createdEnums.filter(Boolean); - deletedEnums = deletedEnums.filter(Boolean); + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedEnums[idxFrom], + to: createdEnums[idxTo], + }); } + + delete createdEnums[idxTo]; + delete deletedEnums[idxFrom]; + + createdEnums = createdEnums.filter(Boolean); + deletedEnums = deletedEnums.filter(Boolean); } + } - result.created = createdEnums; - result.deleted = deletedEnums; + result.created = createdEnums; + result.deleted = deletedEnums; - return result; - } catch (e) { - console.error(e); - throw e; + return result; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const testTablesResolver = (renames: Set) => +async ( + input: ResolverInput
, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; } - }; -export const testTablesResolver = - (renames: Set) => async (input: ResolverInput
): Promise> => { - try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } + let createdTables = [...input.created]; + let deletedTables = [...input.deleted]; - let createdTables = [...input.created]; - let deletedTables = [...input.deleted]; + const result: { + created: Table[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Table; to: Table }[]; + deleted: Table[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - const result: { - created: Table[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Table; to: Table }[]; - deleted: Table[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + for (let rename of renames) { + const [from, to] = rename.split('->'); - for (let rename of renames) { - const [from, to] = rename.split('->'); + const idxFrom = deletedTables.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); - const idxFrom = deletedTables.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; + if (idxFrom >= 0) { + const idxTo = createdTables.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - if (idxFrom >= 0) { - const idxTo = createdTables.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; + const tableFrom = deletedTables[idxFrom]; + const tableTo = createdTables[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, }); + } - const tableFrom = deletedTables[idxFrom]; - const tableTo = createdTables[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, - }); - } - - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedTables[idxFrom], - to: createdTables[idxTo], - }); - } - - delete createdTables[idxTo]; - delete deletedTables[idxFrom]; - - createdTables = createdTables.filter(Boolean); - deletedTables = deletedTables.filter(Boolean); + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedTables[idxFrom], + to: createdTables[idxTo], + }); } + + delete createdTables[idxTo]; + delete deletedTables[idxFrom]; + + createdTables = createdTables.filter(Boolean); + deletedTables = deletedTables.filter(Boolean); } + } - result.created = createdTables; - result.deleted = deletedTables; + result.created = createdTables; + result.deleted = deletedTables; - return result; - } catch (e) { - console.error(e); - throw e; + return result; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const testColumnsResolver = (renames: Set) => +async ( + input: ColumnsResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + tableName: input.tableName, + schema: input.schema, + created: input.created, + renamed: [], + deleted: input.deleted, + }; } - }; -export const testColumnsResolver = - (renames: Set) => async (input: ColumnsResolverInput): Promise> => { - try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { - return { - tableName: input.tableName, - schema: input.schema, - created: input.created, - renamed: [], - deleted: input.deleted, - }; - } + let 
createdColumns = [...input.created]; + let deletedColumns = [...input.deleted]; - let createdColumns = [...input.created]; - let deletedColumns = [...input.deleted]; + const renamed: { from: Column; to: Column }[] = []; - const renamed: { from: Column; to: Column }[] = []; + const schema = input.schema || 'public'; - const schema = input.schema || 'public'; + for (let rename of renames) { + const [from, to] = rename.split('->'); - for (let rename of renames) { - const [from, to] = rename.split('->'); + const idxFrom = deletedColumns.findIndex((it) => { + return `${schema}.${input.tableName}.${it.name}` === from; + }); - const idxFrom = deletedColumns.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === from; + if (idxFrom >= 0) { + const idxTo = createdColumns.findIndex((it) => { + return `${schema}.${input.tableName}.${it.name}` === to; }); - if (idxFrom >= 0) { - const idxTo = createdColumns.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === to; - }); - - renamed.push({ - from: deletedColumns[idxFrom], - to: createdColumns[idxTo], - }); + renamed.push({ + from: deletedColumns[idxFrom], + to: createdColumns[idxTo], + }); - delete createdColumns[idxTo]; - delete deletedColumns[idxFrom]; + delete createdColumns[idxTo]; + delete deletedColumns[idxFrom]; - createdColumns = createdColumns.filter(Boolean); - deletedColumns = deletedColumns.filter(Boolean); - } + createdColumns = createdColumns.filter(Boolean); + deletedColumns = deletedColumns.filter(Boolean); } - - return { - tableName: input.tableName, - schema: input.schema, - created: createdColumns, - deleted: deletedColumns, - renamed, - }; - } catch (e) { - console.error(e); - throw e; } - }; + + return { + tableName: input.tableName, + schema: input.schema, + created: createdColumns, + deleted: deletedColumns, + renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; export const testPolicyResolver = (renames: Set) => async ( @@ -586,208 +635,301 @@ async ( } }; -export const testViewsResolver = - (renames: Set) => async (input: ResolverInput): Promise> => { - try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } +export const testViewsResolver = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } - let createdViews = [...input.created]; - let deletedViews = [...input.deleted]; + let createdViews = [...input.created]; + let deletedViews = [...input.deleted]; - const result: { - created: View[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: View; to: View }[]; - deleted: View[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + const result: { + created: View[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: View; to: View }[]; + deleted: View[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - for (let rename of renames) { - const [from, to] = rename.split('->'); + for (let rename of renames) { + const [from, to] = rename.split('->'); - const idxFrom = deletedViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; + const idxFrom = deletedViews.findIndex((it) => { + return `${it.schema || 
'public'}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - if (idxFrom >= 0) { - const idxTo = createdViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; - }); + const viewFrom = deletedViews[idxFrom]; + const viewTo = createdViews[idxFrom]; - const viewFrom = deletedViews[idxFrom]; - const viewTo = createdViews[idxFrom]; - - if (viewFrom.schema !== viewTo.schema) { - result.moved.push({ - name: viewFrom.name, - schemaFrom: viewFrom.schema, - schemaTo: viewTo.schema, - }); - } - - if (viewFrom.name !== viewTo.name) { - result.renamed.push({ - from: deletedViews[idxFrom], - to: createdViews[idxTo], - }); - } - - delete createdViews[idxTo]; - delete deletedViews[idxFrom]; - - createdViews = createdViews.filter(Boolean); - deletedViews = deletedViews.filter(Boolean); + if (viewFrom.schema !== viewTo.schema) { + result.moved.push({ + name: viewFrom.name, + schemaFrom: viewFrom.schema, + schemaTo: viewTo.schema, + }); } - } - result.created = createdViews; - result.deleted = deletedViews; + if (viewFrom.name !== viewTo.name) { + result.renamed.push({ + from: deletedViews[idxFrom], + to: createdViews[idxTo], + }); + } - return result; - } catch (e) { - console.error(e); - throw e; - } - }; + delete createdViews[idxTo]; + delete deletedViews[idxFrom]; -export const testViewsResolverMySql = - (renames: Set) => - async (input: ResolverInput): Promise> => { - try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; + createdViews = createdViews.filter(Boolean); + deletedViews = deletedViews.filter(Boolean); } + } - let createdViews = [...input.created]; - let deletedViews = [...input.deleted]; - - const result: { - created: ViewSquashed[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: ViewSquashed; to: ViewSquashed }[]; - deleted: ViewSquashed[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + result.created = createdViews; + result.deleted = deletedViews; - for (let rename of renames) { - const [from, to] = rename.split('->'); + return result; + } catch (e) { + console.error(e); + throw e; + } +}; - const idxFrom = deletedViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; +export const testViewsResolverMySql = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } + + let createdViews = [...input.created]; + let deletedViews = [...input.deleted]; + + const result: { + created: ViewSquashed[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: ViewSquashed; to: ViewSquashed }[]; + deleted: ViewSquashed[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - if (idxFrom >= 0) { - const idxTo = createdViews.findIndex((it) => { - return `${it.schema || 
'public'}.${it.name}` === to; + const viewFrom = deletedViews[idxFrom]; + const viewTo = createdViews[idxFrom]; + + if (viewFrom.schema !== viewTo.schema) { + result.moved.push({ + name: viewFrom.name, + schemaFrom: viewFrom.schema, + schemaTo: viewTo.schema, }); + } - const viewFrom = deletedViews[idxFrom]; - const viewTo = createdViews[idxFrom]; - - if (viewFrom.schema !== viewTo.schema) { - result.moved.push({ - name: viewFrom.name, - schemaFrom: viewFrom.schema, - schemaTo: viewTo.schema, - }); - } - - if (viewFrom.name !== viewTo.name) { - result.renamed.push({ - from: deletedViews[idxFrom], - to: createdViews[idxTo], - }); - } - - delete createdViews[idxTo]; - delete deletedViews[idxFrom]; - - createdViews = createdViews.filter(Boolean); - deletedViews = deletedViews.filter(Boolean); + if (viewFrom.name !== viewTo.name) { + result.renamed.push({ + from: deletedViews[idxFrom], + to: createdViews[idxTo], + }); } + + delete createdViews[idxTo]; + delete deletedViews[idxFrom]; + + createdViews = createdViews.filter(Boolean); + deletedViews = deletedViews.filter(Boolean); } + } - result.created = createdViews; - result.deleted = deletedViews; + result.created = createdViews; + result.deleted = deletedViews; - return result; - } catch (e) { - console.error(e); - throw e; + return result; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const testViewsResolverSingleStore = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; } - }; -export const testViewsResolverSqlite = - (renames: Set) => async (input: ResolverInput): Promise> => { - try { - if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } + let createdViews = [...input.created]; + let deletedViews = [...input.deleted]; - let createdViews = [...input.created]; - let deletedViews = [...input.deleted]; + const result: { + created: ViewSquashed[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: ViewSquashed; to: ViewSquashed }[]; + deleted: ViewSquashed[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - const result: { - created: SqliteView[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: SqliteView; to: SqliteView }[]; - deleted: SqliteView[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + for (let rename of renames) { + const [from, to] = rename.split('->'); - for (let rename of renames) { - const [from, to] = rename.split('->'); + const idxFrom = deletedViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); - const idxFrom = deletedViews.findIndex((it) => { - return it.name === from; + if (idxFrom >= 0) { + const idxTo = createdViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - if (idxFrom >= 0) { - const idxTo = createdViews.findIndex((it) => { - return it.name === to; + const viewFrom = deletedViews[idxFrom]; + const viewTo = createdViews[idxFrom]; + + if (viewFrom.schema !== viewTo.schema) { + result.moved.push({ + name: viewFrom.name, + schemaFrom: viewFrom.schema, + schemaTo: viewTo.schema, + }); + } + + if (viewFrom.name !== viewTo.name) { + result.renamed.push({ + from: deletedViews[idxFrom], + 
to: createdViews[idxTo], }); + } + + delete createdViews[idxTo]; + delete deletedViews[idxFrom]; + + createdViews = createdViews.filter(Boolean); + deletedViews = deletedViews.filter(Boolean); + } + } + + result.created = createdViews; + result.deleted = deletedViews; - const viewFrom = deletedViews[idxFrom]; - const viewTo = createdViews[idxFrom]; + return result; + } catch (e) { + console.error(e); + throw e; + } +}; - if (viewFrom.name !== viewTo.name) { - result.renamed.push({ - from: deletedViews[idxFrom], - to: createdViews[idxTo], - }); - } +export const testViewsResolverSqlite = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } + + let createdViews = [...input.created]; + let deletedViews = [...input.deleted]; + + const result: { + created: SqliteView[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: SqliteView; to: SqliteView }[]; + deleted: SqliteView[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedViews.findIndex((it) => { + return it.name === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdViews.findIndex((it) => { + return it.name === to; + }); - delete createdViews[idxTo]; - delete deletedViews[idxFrom]; + const viewFrom = deletedViews[idxFrom]; + const viewTo = createdViews[idxFrom]; - createdViews = createdViews.filter(Boolean); - deletedViews = deletedViews.filter(Boolean); + if (viewFrom.name !== viewTo.name) { + result.renamed.push({ + from: deletedViews[idxFrom], + to: createdViews[idxTo], + }); } - } - result.created = createdViews; - result.deleted = deletedViews; + delete createdViews[idxTo]; + delete deletedViews[idxFrom]; - return result; - } catch (e) { - console.error(e); - throw e; + createdViews = createdViews.filter(Boolean); + deletedViews = deletedViews.filter(Boolean); + } } - }; + + result.created = createdViews; + result.deleted = deletedViews; + + return result; + } catch (e) { + console.error(e); + throw e; + } +}; export const diffTestSchemasPush = async ( client: PGlite, @@ -798,13 +940,19 @@ export const diffTestSchemasPush = async ( schemas: string[] = ['public'], casing?: CasingType | undefined, entities?: Entities, - sqlStatementsToRun: { before?: string[]; after?: string[]; runApply?: boolean } = { + sqlStatementsToRun: { + before?: string[]; + after?: string[]; + runApply?: boolean; + } = { before: [], after: [], runApply: true, }, ) => { - const shouldRunApply = sqlStatementsToRun.runApply === undefined ? true : sqlStatementsToRun.runApply; + const shouldRunApply = sqlStatementsToRun.runApply === undefined + ? true + : sqlStatementsToRun.runApply; for (const st of sqlStatementsToRun.before ?? 
[]) { await client.query(st); @@ -837,6 +985,19 @@ export const diffTestSchemasPush = async ( ); } + // do introspect into PgSchemaInternal + const introspectedSchema = await fromDatabase( + { + query: async (query: string, values?: any[] | undefined) => { + const res = await client.query(query, values); + return res.rows as any[]; + }, + }, + undefined, + schemas, + entities, + ); + const leftTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[]; const leftSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[]; @@ -865,21 +1026,6 @@ export const diffTestSchemasPush = async ( casing, ); - // do introspect into PgSchemaInternal - const introspectedSchema = await fromDatabase( - { - query: async (query: string, values?: any[] | undefined) => { - const res = await client.query(query, values); - return res.rows as any[]; - }, - }, - undefined, - schemas, - entities, - undefined, - serialized2, - ); - const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; @@ -975,7 +1121,10 @@ export const diffTestSchemasPush = async ( } }; -export const applyPgDiffs = async (sn: PostgresSchema, casing: CasingType | undefined) => { +export const applyPgDiffs = async ( + sn: PostgresSchema, + casing: CasingType | undefined, +) => { const dryRun = { version: '7', dialect: 'postgresql', @@ -1122,81 +1271,366 @@ export const diffTestSchemas = async ( const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { - version: '7', - dialect: 'postgresql', + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashPgScheme(sch1); + const sn2 = squashPgScheme(sch2); + + const validatedPrev = pgSchema.parse(sch1); + const validatedCur = pgSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applyPgSnapshotsDiff( + sn1, + sn2, + testSchemasResolver(renames), + testEnumsResolver(renames), + testSequencesResolver(renames), + testPolicyResolver(renames), + testIndPolicyResolver(renames), + testRolesResolver(renames), + testTablesResolver(renames), + testColumnsResolver(renames), + testViewsResolver(renames), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; + } else { + const { sqlStatements, statements } = await applyPgSnapshotsDiff( + sn1, + sn2, + schemasResolver, + enumsResolver, + sequencesResolver, + policyResolver, + indPolicyResolver, + roleResolver, + tablesResolver, + columnsResolver, + viewsResolver, + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; + } +}; + +export const diffTestSchemasPushMysql = async ( + client: Connection, + left: MysqlSchema, + right: MysqlSchema, + renamesArr: string[], + schema: string, + cli: boolean = false, + casing?: CasingType | undefined, +) => { + const { sqlStatements } = await applyMySqlDiffs(left, casing); + for (const st of sqlStatements) { + await client.query(st); + } + // do introspect into PgSchemaInternal + const introspectedSchema = await fromMySqlDatabase( + { + query: async (sql: string, params?: any[]) => { + const res = await client.execute(sql, params); + return res[0] as any; + }, + }, + schema, + ); + + const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const leftViews = Object.values(right).filter((it) => 
is(it, MySqlView)) as MySqlView[]; + + const serialized2 = generateMySqlSnapshot(leftTables, leftViews, casing); + + const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashMysqlScheme(sch1); + const sn2 = squashMysqlScheme(sch2); + + const validatedPrev = mysqlSchema.parse(sch1); + const validatedCur = mysqlSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + testTablesResolver(renames), + testColumnsResolver(renames), + testViewsResolverMySql(renames), + validatedPrev, + validatedCur, + 'push', + ); + return { sqlStatements, statements }; + } else { + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + mySqlViewsResolver, + validatedPrev, + validatedCur, + 'push', + ); + return { sqlStatements, statements }; + } +}; + +export const applyMySqlDiffs = async ( + sn: MysqlSchema, + casing: CasingType | undefined, +) => { + const dryRun = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + views: {}, + tables: {}, + enums: {}, + schemas: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + } as const; + + const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const views = Object.values(sn).filter((it) => is(it, MySqlView)) as MySqlView[]; + + const serialized1 = generateMySqlSnapshot(tables, views, casing); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + + const sch1 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sn1 = squashMysqlScheme(sch1); + + const validatedPrev = mysqlSchema.parse(dryRun); + const validatedCur = mysqlSchema.parse(sch1); + + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + dryRun, + sn1, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + testViewsResolverMySql(new Set()), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; +}; + +export const diffTestSchemasMysql = async ( + left: MysqlSchema, + right: MysqlSchema, + renamesArr: string[], + cli: boolean = false, + casing?: CasingType | undefined, +) => { + const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const leftViews = Object.values(left).filter((it) => is(it, MySqlView)) as MySqlView[]; + + const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const rightViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; + + const serialized1 = generateMySqlSnapshot(leftTables, leftViews, casing); + const serialized2 = generateMySqlSnapshot(rightTables, rightViews, casing); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashMysqlScheme(sch1); + const sn2 = squashMysqlScheme(sch2); + + const validatedPrev = mysqlSchema.parse(sch1); + 
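+	// `renamesArr` entries use a "from->to" convention that the test resolvers split
+	// on '->'; depending on the resolver the names are either schema-qualified
+	// (e.g. 'public.users1->public.users2') or bare (e.g. 'dev->dev2').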
const validatedCur = mysqlSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + testTablesResolver(renames), + testColumnsResolver(renames), + testViewsResolverMySql(renames), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; + } + + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + mySqlViewsResolver, + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; +}; + +export const diffTestSchemasSingleStore = async ( + left: SinglestoreSchema, + right: SinglestoreSchema, + renamesArr: string[], + cli: boolean = false, + casing?: CasingType | undefined, +) => { + const leftTables = Object.values(left).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + + /* const leftViews = Object.values(left).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ + + const rightTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + + /* const rightViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ + + const serialized1 = generateSingleStoreSnapshot( + leftTables, + /* leftViews, */ + casing, + ); + const serialized2 = generateSingleStoreSnapshot( + rightTables, + /* rightViews, */ + casing, + ); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '1', + dialect: 'singlestore', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { - version: '7', - dialect: 'postgresql', + version: '1', + dialect: 'singlestore', id: '0', prevId: '0', ...rest2, } as const; - const sn1 = squashPgScheme(sch1); - const sn2 = squashPgScheme(sch2); + const sn1 = squashSingleStoreScheme(sch1); + const sn2 = squashSingleStoreScheme(sch2); - const validatedPrev = pgSchema.parse(sch1); - const validatedCur = pgSchema.parse(sch2); + const validatedPrev = singlestoreSchema.parse(sch1); + const validatedCur = singlestoreSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { - const { sqlStatements, statements } = await applyPgSnapshotsDiff( + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, sn2, - testSchemasResolver(renames), - testEnumsResolver(renames), - testSequencesResolver(renames), - testPolicyResolver(renames), - testIndPolicyResolver(renames), - testRolesResolver(renames), testTablesResolver(renames), testColumnsResolver(renames), - testViewsResolver(renames), - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; - } else { - const { sqlStatements, statements } = await applyPgSnapshotsDiff( - sn1, - sn2, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, + /* testViewsResolverSingleStore(renames), */ validatedPrev, validatedCur, ); return { sqlStatements, statements }; } + + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + /* singleStoreViewsResolver, */ + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; }; -export const diffTestSchemasPushMysql = async ( +export const diffTestSchemasPushSingleStore = async ( client: Connection, - left: MysqlSchema, - right: MysqlSchema, + left: SinglestoreSchema, + 
right: SinglestoreSchema, renamesArr: string[], schema: string, cli: boolean = false, casing?: CasingType | undefined, ) => { - const { sqlStatements } = await applyMySqlDiffs(left, casing); + const { sqlStatements } = await applySingleStoreDiffs(left, casing); for (const st of sqlStatements) { await client.query(st); } // do introspect into PgSchemaInternal - const introspectedSchema = await fromMySqlDatabase( + const introspectedSchema = await fromSingleStoreDatabase( { query: async (sql: string, params?: any[]) => { const res = await client.execute(sql, params); @@ -1206,58 +1640,62 @@ export const diffTestSchemasPushMysql = async ( schema, ); - const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + const leftTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; - const leftViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; + /* const leftViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ - const serialized2 = generateMySqlSnapshot(leftTables, leftViews, casing); + const serialized2 = generateSingleStoreSnapshot( + leftTables, + /* leftViews, */ + casing, + ); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { - version: '5', - dialect: 'mysql', + version: '1', + dialect: 'singlestore', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { - version: '5', - dialect: 'mysql', + version: '1', + dialect: 'singlestore', id: '0', prevId: '0', ...rest2, } as const; - const sn1 = squashMysqlScheme(sch1); - const sn2 = squashMysqlScheme(sch2); + const sn1 = squashSingleStoreScheme(sch1); + const sn2 = squashSingleStoreScheme(sch2); - const validatedPrev = mysqlSchema.parse(sch1); - const validatedCur = mysqlSchema.parse(sch2); + const validatedPrev = singlestoreSchema.parse(sch1); + const validatedCur = singlestoreSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), - testViewsResolverMySql(renames), + /* testViewsResolverSingleStore(renames), */ validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements }; } else { - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, - mySqlViewsResolver, + /* singleStoreViewsResolver, */ validatedPrev, validatedCur, 'push', @@ -1266,14 +1704,17 @@ export const diffTestSchemasPushMysql = async ( } }; -export const applyMySqlDiffs = async (sn: MysqlSchema, casing: CasingType | undefined) => { +export const applySingleStoreDiffs = async ( + sn: SinglestoreSchema, + casing: CasingType | undefined, +) => { const dryRun = { - version: '5', - dialect: 'mysql', + version: '1', + dialect: 'singlestore', id: '0', prevId: '0', - views: {}, tables: {}, + views: {}, enums: {}, schemas: {}, _meta: { @@ -1283,103 +1724,33 @@ export const applyMySqlDiffs = async (sn: MysqlSchema, casing: CasingType | unde }, } as const; - const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + const tables = Object.values(sn).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; - const views = Object.values(sn).filter((it) => 
is(it, MySqlView)) as MySqlView[]; + /* const views = Object.values(sn).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ - const serialized1 = generateMySqlSnapshot(tables, views, casing); + const serialized1 = generateSingleStoreSnapshot(tables, /* views, */ casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const sch1 = { - version: '5', - dialect: 'mysql', + version: '1', + dialect: 'singlestore', id: '0', prevId: '0', ...rest1, } as const; - const sn1 = squashMysqlScheme(sch1); + const sn1 = squashSingleStoreScheme(sch1); - const validatedPrev = mysqlSchema.parse(dryRun); - const validatedCur = mysqlSchema.parse(sch1); + const validatedPrev = singlestoreSchema.parse(dryRun); + const validatedCur = singlestoreSchema.parse(sch1); - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( dryRun, sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), - testViewsResolverMySql(new Set()), - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; -}; - -export const diffTestSchemasMysql = async ( - left: MysqlSchema, - right: MysqlSchema, - renamesArr: string[], - cli: boolean = false, - casing?: CasingType | undefined, -) => { - const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - - const leftViews = Object.values(left).filter((it) => is(it, MySqlView)) as MySqlView[]; - - const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - - const rightViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; - - const serialized1 = generateMySqlSnapshot(leftTables, leftViews, casing); - const serialized2 = generateMySqlSnapshot(rightTables, rightViews, casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashMysqlScheme(sch1); - const sn2 = squashMysqlScheme(sch2); - - const validatedPrev = mysqlSchema.parse(sch1); - const validatedCur = mysqlSchema.parse(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolverMySql(renames), - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; - } - - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - mySqlViewsResolver, + /* testViewsResolverSingleStore(new Set()), */ validatedPrev, validatedCur, ); @@ -1582,22 +1953,28 @@ export async function diffTestSchemasPushLibSQL( 'push', ); - const { statementsToExecute, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate } = - await libSqlLogSuggestionsAndReturn( - { - query: async (sql: string, params?: any[]) => { - const res = await client.execute({ sql, args: params || [] }); - return res.rows as T[]; - }, - run: async (query: string) => { - await client.execute(query); - }, + const { + statementsToExecute, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await libSqlLogSuggestionsAndReturn( + { + query: async 
(sql: string, params?: any[]) => { + const res = await client.execute({ sql, args: params || [] }); + return res.rows as T[]; + }, + run: async (query: string) => { + await client.execute(query); }, - statements, - sn1, - sn2, - _meta!, - ); + }, + statements, + sn1, + sn2, + _meta!, + ); return { sqlStatements: statementsToExecute, @@ -1911,7 +2288,9 @@ export const introspectPgToFile = async ( fs.writeFileSync(`tests/introspect/postgres/${testName}.ts`, file.file); // generate snapshot from ts file - const response = await prepareFromPgImports([`tests/introspect/postgres/${testName}.ts`]); + const response = await prepareFromPgImports([ + `tests/introspect/postgres/${testName}.ts`, + ]); const afterFileImports = generatePgSnapshot( response.tables, @@ -1938,7 +2317,10 @@ export const introspectPgToFile = async ( const sn2AfterIm = squashPgScheme(sch2); const validatedCurAfterImport = pgSchema.parse(sch2); - const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyPgSnapshotsDiff( + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applyPgSnapshotsDiff( initSn, sn2AfterIm, testSchemasResolver(new Set()), @@ -2003,9 +2385,15 @@ export const introspectMySQLToFile = async ( fs.writeFileSync(`tests/introspect/mysql/${testName}.ts`, file.file); - const response = await prepareFromMySqlImports([`tests/introspect/mysql/${testName}.ts`]); + const response = await prepareFromMySqlImports([ + `tests/introspect/mysql/${testName}.ts`, + ]); - const afterFileImports = generateMySqlSnapshot(response.tables, response.views, casing); + const afterFileImports = generateMySqlSnapshot( + response.tables, + response.views, + casing, + ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; @@ -2020,7 +2408,10 @@ export const introspectMySQLToFile = async ( const sn2AfterIm = squashMysqlScheme(sch2); const validatedCurAfterImport = mysqlSchema.parse(sch2); - const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyMysqlSnapshotsDiff( + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applyMysqlSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), @@ -2038,6 +2429,99 @@ export const introspectMySQLToFile = async ( }; }; +export const introspectSingleStoreToFile = async ( + client: Connection, + initSchema: SinglestoreSchema, + testName: string, + schema: string, + casing?: CasingType | undefined, +) => { + // put in db + const { sqlStatements } = await applySingleStoreDiffs(initSchema, casing); + for (const st of sqlStatements) { + await client.query(st); + } + + // introspect to schema + const introspectedSchema = await fromSingleStoreDatabase( + { + query: async (sql: string, params?: any[] | undefined) => { + const res = await client.execute(sql, params); + return res[0] as any; + }, + }, + schema, + ); + + const file = schemaToTypeScriptSingleStore(introspectedSchema, 'camel'); + + fs.writeFileSync(`tests/introspect/singlestore/${testName}.ts`, file.file); + + const response = await prepareFromSingleStoreImports([ + `tests/introspect/singlestore/${testName}.ts`, + ]); + + const afterFileImports = generateSingleStoreSnapshot( + response.tables, + /* response.views, */ + casing, + ); + + const { version: v2, dialect: d2, ...rest2 } = afterFileImports; + + const sch2 = { + version: '1', + dialect: 'singlestore', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn2AfterIm = squashSingleStoreScheme(sch2); + 
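+	// `sn2AfterIm` is the snapshot rebuilt from the TypeScript file that introspection
+	// just generated; below it is diffed against a snapshot built directly from
+	// `initSchema`, so an empty statement list means the SingleStore
+	// introspect -> codegen -> import round trip was lossless.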
const validatedCurAfterImport = singlestoreSchema.parse(sch2); + + const leftTables = Object.values(initSchema).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + + const initSnapshot = generateSingleStoreSnapshot( + leftTables, + /* response.views, */ + casing, + ); + + const { version: initV, dialect: initD, ...initRest } = initSnapshot; + + const initSch = { + version: '1', + dialect: 'singlestore', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = squashSingleStoreScheme(initSch); + const validatedCur = singlestoreSchema.parse(initSch); + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applySingleStoreSnapshotsDiff( + sn2AfterIm, + initSn, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + /* testViewsResolverSingleStore(new Set()), */ + validatedCurAfterImport, + validatedCur, + ); + + fs.rmSync(`tests/introspect/singlestore/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; + export const introspectSQLiteToFile = async ( client: Database, initSchema: SqliteSchema, @@ -2081,9 +2565,15 @@ export const introspectSQLiteToFile = async ( fs.writeFileSync(`tests/introspect/sqlite/${testName}.ts`, file.file); - const response = await prepareFromSqliteImports([`tests/introspect/sqlite/${testName}.ts`]); + const response = await prepareFromSqliteImports([ + `tests/introspect/sqlite/${testName}.ts`, + ]); - const afterFileImports = generateSqliteSnapshot(response.tables, response.views, casing); + const afterFileImports = generateSqliteSnapshot( + response.tables, + response.views, + casing, + ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; @@ -2098,7 +2588,10 @@ export const introspectSQLiteToFile = async ( const sn2AfterIm = squashSqliteScheme(sch2); const validatedCurAfterImport = sqliteSchema.parse(sch2); - const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applySqliteSnapshotsDiff( + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applySqliteSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), @@ -2159,9 +2652,15 @@ export const introspectLibSQLToFile = async ( fs.writeFileSync(`tests/introspect/libsql/${testName}.ts`, file.file); - const response = await prepareFromSqliteImports([`tests/introspect/libsql/${testName}.ts`]); + const response = await prepareFromSqliteImports([ + `tests/introspect/libsql/${testName}.ts`, + ]); - const afterFileImports = generateSqliteSnapshot(response.tables, response.views, casing); + const afterFileImports = generateSqliteSnapshot( + response.tables, + response.views, + casing, + ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; @@ -2176,7 +2675,10 @@ export const introspectLibSQLToFile = async ( const sn2AfterIm = squashSqliteScheme(sch2); const validatedCurAfterImport = sqliteSchema.parse(sch2); - const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyLibSQLSnapshotsDiff( + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applyLibSQLSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), diff --git a/drizzle-kit/tests/singlestore-generated.test.ts b/drizzle-kit/tests/singlestore-generated.test.ts new file mode 100644 index 000000000..8944f3b21 --- /dev/null +++ b/drizzle-kit/tests/singlestore-generated.test.ts @@ -0,0 +1,1290 @@ +import { SQL, sql } from 
'drizzle-orm'; +import { int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +test('generated as callback: add column with generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as callback: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", + ]); +}); + +test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 
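+		// The differ rebuilds virtual generated columns (DROP COLUMN followed by ADD),
+		// whereas the stored variant above is expressed as a single
+		// MODIFY COLUMN ... GENERATED ALWAYS AS (...) STORED statement.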
"ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", + ]); +}); + +test('generated as callback: drop generated constraint as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as callback: drop generated constraint as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as callback: change generated constraint type from virtual to stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 
'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as callback: change generated constraint type from stored to virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as callback: change generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +// --- + +test('generated as sql: add column with generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { 
+ as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", + ]); +}); + +test('generated as sql: drop generated constraint as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 
'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as sql: drop generated constraint as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as sql: change generated constraint type from virtual to stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as sql: change generated constraint type from stored to virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: 
text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as sql: change generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +// --- + +test('generated as string: add column with generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as string: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { 
statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", + ]); +}); + +test('generated as string: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", + ]); +}); + +test('generated as string: drop generated constraint as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as string: drop generated constraint as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: 
int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as string: change generated constraint type from virtual to stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``, { + mode: 'virtual', + }), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as string: change generated constraint type from stored to virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as string: change generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: 
text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); diff --git a/drizzle-kit/tests/singlestore-schemas.test.ts b/drizzle-kit/tests/singlestore-schemas.test.ts new file mode 100644 index 000000000..db9fe0480 --- /dev/null +++ b/drizzle-kit/tests/singlestore-schemas.test.ts @@ -0,0 +1,155 @@ +import { singlestoreSchema, singlestoreTable } from 'drizzle-orm/singlestore-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +// We don't manage databases(schemas) in MySQL with Drizzle Kit +test('add schema #1', async () => { + const to = { + devSchema: singlestoreSchema('dev'), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(0); +}); + +test('add schema #2', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + }; + const to = { + devSchema: singlestoreSchema('dev'), + devSchema2: singlestoreSchema('dev2'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('delete schema #1', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, {}, []); + + expect(statements.length).toBe(0); +}); + +test('delete schema #2', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + devSchema2: singlestoreSchema('dev2'), + }; + const to = { + devSchema: singlestoreSchema('dev'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('rename schema #1', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + }; + const to = { + devSchema2: singlestoreSchema('dev2'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev->dev2']); + + expect(statements.length).toBe(0); +}); + +test('rename schema #2', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + devSchema1: singlestoreSchema('dev1'), + }; + const to = { + devSchema: singlestoreSchema('dev'), + devSchema2: singlestoreSchema('dev2'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #1', async () => { + const dev = singlestoreSchema('dev'); + const from = {}; + const to = { + dev, + users: dev.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); 
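+// Tables declared through `singlestoreSchema(...)` are ignored by the differ, so only
+// tables created with `singlestoreTable(...)` yield create_table statements (see
+// "add table to schema #3" below). A minimal sketch of the distinction:
+//
+//   const dev = singlestoreSchema('dev');
+//   const inDev = dev.table('users', {});           // produces no statements
+//   const topLevel = singlestoreTable('users', {}); // produces a create_table statement
+//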
+ +test('add table to schema #2', async () => { + const dev = singlestoreSchema('dev'); + const from = { dev }; + const to = { + dev, + users: dev.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #3', async () => { + const dev = singlestoreSchema('dev'); + const from = { dev }; + const to = { + dev, + usersInDev: dev.table('users', {}), + users: singlestoreTable('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePkName: '', + compositePKs: [], + }); +}); + +test('remove table from schema #1', async () => { + const dev = singlestoreSchema('dev'); + const from = { dev, users: dev.table('users', {}) }; + const to = { + dev, + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('remove table from schema #2', async () => { + const dev = singlestoreSchema('dev'); + const from = { dev, users: dev.table('users', {}) }; + const to = {}; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/singlestore.test.ts b/drizzle-kit/tests/singlestore.test.ts new file mode 100644 index 000000000..3bdccab81 --- /dev/null +++ b/drizzle-kit/tests/singlestore.test.ts @@ -0,0 +1,580 @@ +import { sql } from 'drizzle-orm'; +import { + index, + json, + primaryKey, + serial, + singlestoreSchema, + singlestoreTable, + text, + uniqueIndex, +} from 'drizzle-orm/singlestore-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +test('add table #1', async () => { + const to = { + users: singlestoreTable('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #2', async () => { + const to = { + users: singlestoreTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [ + { + name: 'id', + notNull: true, + primaryKey: false, + type: 'serial', + autoincrement: true, + }, + ], + compositePKs: ['users_id;id'], + compositePkName: 'users_id', + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('add table #3', async () => { + const to = { + users: singlestoreTable( + 'users', + { + id: serial('id'), + }, + (t) => { + return { + pk: primaryKey({ + name: 'users_pk', + columns: [t.id], + }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [ + { + 
name: 'id', + notNull: true, + primaryKey: false, + type: 'serial', + autoincrement: true, + }, + ], + compositePKs: ['users_pk;id'], + uniqueConstraints: [], + compositePkName: 'users_pk', + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('add table #4', async () => { + const to = { + users: singlestoreTable('users', {}), + posts: singlestoreTable('posts', {}), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'create_table', + tableName: 'posts', + schema: undefined, + columns: [], + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #5', async () => { + const schema = singlestoreSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('add table #6', async () => { + const from = { + users1: singlestoreTable('users1', {}), + }; + + const to = { + users2: singlestoreTable('users2', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users2', + schema: undefined, + columns: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + policies: [], + type: 'drop_table', + tableName: 'users1', + schema: undefined, + }); +}); + +test('add table #7', async () => { + const from = { + users1: singlestoreTable('users1', {}), + }; + + const to = { + users: singlestoreTable('users', {}), + users2: singlestoreTable('users2', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'public.users1->public.users2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + compositePKs: [], + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users1', + tableNameTo: 'users2', + fromSchema: undefined, + toSchema: undefined, + }); +}); + +test('add schema + table #1', async () => { + const schema = singlestoreSchema('folder'); + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(0); +}); + +test('change schema with tables #1', async () => { + const schema = singlestoreSchema('folder'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder->folder2', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #1', async () => { + const schema = singlestoreSchema('folder'); + const from = { + 
schema, + users: singlestoreTable('users', {}), + }; + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'public.users->folder.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + policies: [], + type: 'drop_table', + tableName: 'users', + schema: undefined, + }); +}); + +test('change table schema #2', async () => { + const schema = singlestoreSchema('folder'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema, + users: singlestoreTable('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder.users->public.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + uniqueConstraints: [], + compositePkName: '', + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('change table schema #3', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #4', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #5', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', {}), + }; + const to = { + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #5', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', {}), // rename and move table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1.users->folder2.users2', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #6', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', {}), // rename table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1->folder2', + 'folder2.users->folder2.users2', + ]); + + expect(statements.length).toBe(0); +}); + +test('add table #10', async () => { + const to = { + users: singlestoreTable('table', { + json: 
json('json').default({}), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT '{}'\n);\n", + ); +}); + +test('add table #11', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default([]), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT '[]'\n);\n", + ); +}); + +test('add table #12', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default([1, 2, 3]), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT '[1,2,3]'\n);\n", + ); +}); + +test('add table #13', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default({ key: 'value' }), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value"}\'\n);\n', + ); +}); + +test('add table #14', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default({ + key: 'value', + arr: [1, 2, 3], + }), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n', + ); +}); + +// TODO: add bson type tests + +// TODO: add blob type tests + +// TODO: add uuid type tests + +// TODO: add guid type tests + +// TODO: add vector type tests + +// TODO: add geopoint type tests + +test('drop index', async () => { + const from = { + users: singlestoreTable( + 'table', + { + name: text('name'), + }, + (t) => { + return { + idx: index('name_idx').on(t.name), + }; + }, + ), + }; + + const to = { + users: singlestoreTable('table', { + name: text('name'), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;'); +}); + +test('add table with indexes', async () => { + const from = {}; + + const to = { + users: singlestoreTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + email: text('email'), + }, + (t) => ({ + uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), + indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), + indexExprMultiple: index('indexExprMultiple').on( + sql`(lower(${t.email}))`, + sql`(lower(${t.email}))`, + ), + + uniqueCol: uniqueIndex('uniqueCol').on(t.email), + indexCol: index('indexCol').on(t.email), + indexColMultiple: index('indexColMultiple').on(t.email, t.email), + + indexColExpr: index('indexColExpr').on( + sql`(lower(${t.email}))`, + t.email, + ), + }), + ), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(6); + expect(sqlStatements).toStrictEqual([ + `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` 
UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`) +); +`, + 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));', + 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));', + 'CREATE INDEX `indexCol` ON `users` (`email`);', + 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', + 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', + ]); +}); diff --git a/drizzle-kit/tests/testsinglestore.ts b/drizzle-kit/tests/testsinglestore.ts new file mode 100644 index 000000000..1dc97d9c3 --- /dev/null +++ b/drizzle-kit/tests/testsinglestore.ts @@ -0,0 +1,29 @@ +import { index, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +const from = { + users: singlestoreTable( + 'table', + { + name: text('name'), + }, + (t) => { + return { + idx: index('name_idx').on(t.name), + }; + }, + ), +}; + +const to = { + users: singlestoreTable('table', { + name: text('name'), + }), +}; + +diffTestSchemasSingleStore(from, to, []).then((res) => { + const { statements, sqlStatements } = res; + + console.log(statements); + console.log(sqlStatements); +}); diff --git a/drizzle-kit/tests/validations.test.ts b/drizzle-kit/tests/validations.test.ts index 82731ee25..8a64603bb 100644 --- a/drizzle-kit/tests/validations.test.ts +++ b/drizzle-kit/tests/validations.test.ts @@ -1,5 +1,6 @@ import { mysqlCredentials } from 'src/cli/validations/mysql'; import { postgresCredentials } from 'src/cli/validations/postgres'; +import { singlestoreCredentials } from 'src/cli/validations/singlestore'; import { sqliteCredentials } from 'src/cli/validations/sqlite'; import { expect, test } from 'vitest'; @@ -698,3 +699,171 @@ test('mysql #17', () => { }); }).toThrowError(); }); + +test('singlestore #1', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: 'database', + host: 'host', + }), + ).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('singlestore #2', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: 'database', + host: 'host', + }), + ).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('singlestore #3', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }), + ).toStrictEqual({ + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }); +}); + +test('singlestore #4', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: 'host', + database: 'database', + ssl: 'allow', + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: 'allow', + }); +}); + +test('singlestore #5', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: 'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }); +}); + +test('singlestore #6', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + }); + }).toThrowError(); +}); + +test('singlestore #7', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + url: undefined, + }); + }).toThrowError(); +}); + +test('singlestore #8', () => { + expect(() => { + 
singlestoreCredentials.parse({ + dialect: 'singlestore', + url: '', + }); + }).toThrowError(); +}); + +test('singlestore #9', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: '', + database: '', + }); + }).toThrowError(); +}); + +test('singlestore #10', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: '', + }); + }).toThrowError(); +}); + +test('singlestore #11', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: '', + }); + }).toThrowError(); +}); + +test('singlestore #12', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: ' ', + host: '', + }); + }).toThrowError(); +}); + +test('singlestore #13', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: '', + host: ' ', + }); + }).toThrowError(); +}); + +test('singlestore #14', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: ' ', + host: ' ', + port: '', + }); + }).toThrowError(); +}); diff --git a/drizzle-kit/tests/wrap-param.test.ts b/drizzle-kit/tests/wrap-param.test.ts index 542998bda..a27d27d45 100644 --- a/drizzle-kit/tests/wrap-param.test.ts +++ b/drizzle-kit/tests/wrap-param.test.ts @@ -7,6 +7,9 @@ test('wrapParam', () => { expect(wrapParam('url', 'mysql://user:password@localhost:3306/database', false, 'url')).toBe( ` [${chalk.green('✓')}] url: 'mysql://user:****@localhost:3306/database'`, ); + expect(wrapParam('url', 'singlestore://user:password@localhost:3306/database', false, 'url')).toBe( + ` [${chalk.green('✓')}] url: 'singlestore://user:****@localhost:3306/database'`, + ); expect(wrapParam('url', 'postgresql://user:password@localhost:5432/database', false, 'url')).toBe( ` [${chalk.green('✓')}] url: 'postgresql://user:****@localhost:5432/database'`, ); diff --git a/drizzle-kit/vitest.config.ts b/drizzle-kit/vitest.config.ts index 602e96ede..fd728eb11 100644 --- a/drizzle-kit/vitest.config.ts +++ b/drizzle-kit/vitest.config.ts @@ -5,8 +5,17 @@ export default defineConfig({ test: { include: [ 'tests/**/*.test.ts', + // Need to test it first before pushing changes + // 'tests/singlestore-schemas.test.ts', + // 'tests/singlestore-views.test.ts', + // 'tests/push/singlestore-push.test.ts', + // 'tests/push/singlestore.test.ts', ], + // This one was excluded because we need to modify an API for SingleStore-generated columns. + // It’s in the backlog. 
+ exclude: ['tests/**/singlestore-generated.test.ts'], + typecheck: { tsconfig: 'tsconfig.json', }, diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json index 87c3f0bc2..a4c90dae4 100644 --- a/drizzle-orm/package.json +++ b/drizzle-orm/package.json @@ -29,6 +29,7 @@ "orm", "pg", "mysql", + "singlestore", "postgresql", "postgres", "sqlite", diff --git a/drizzle-orm/src/column-builder.ts b/drizzle-orm/src/column-builder.ts index f621343d9..207f28026 100644 --- a/drizzle-orm/src/column-builder.ts +++ b/drizzle-orm/src/column-builder.ts @@ -2,6 +2,7 @@ import { entityKind } from '~/entity.ts'; import type { Column } from './column.ts'; import type { MySqlColumn } from './mysql-core/index.ts'; import type { ExtraConfigColumn, PgColumn, PgSequenceOptions } from './pg-core/index.ts'; +import type { SingleStoreColumn } from './singlestore-core/index.ts'; import type { SQL } from './sql/sql.ts'; import type { SQLiteColumn } from './sqlite-core/index.ts'; import type { Assume, Simplify } from './utils.ts'; @@ -17,7 +18,7 @@ export type ColumnDataType = | 'custom' | 'buffer'; -export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'common'; +export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'singlestore' | 'common'; export type GeneratedStorageMode = 'virtual' | 'stored'; @@ -314,6 +315,7 @@ export type BuildColumn< TDialect extends Dialect, > = TDialect extends 'pg' ? PgColumn> : TDialect extends 'mysql' ? MySqlColumn> + : TDialect extends 'singlestore' ? SingleStoreColumn> : TDialect extends 'sqlite' ? SQLiteColumn> : TDialect extends 'common' ? Column> : never; @@ -356,5 +358,6 @@ export type BuildExtraConfigColumns< export type ChangeColumnTableName = TDialect extends 'pg' ? PgColumn> : TDialect extends 'mysql' ? MySqlColumn> + : TDialect extends 'singlestore' ? SingleStoreColumn> : TDialect extends 'sqlite' ? 
SQLiteColumn> : never; diff --git a/drizzle-orm/src/singlestore-core/alias.ts b/drizzle-orm/src/singlestore-core/alias.ts new file mode 100644 index 000000000..6c08bdff3 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/alias.ts @@ -0,0 +1,10 @@ +import { TableAliasProxyHandler } from '~/alias.ts'; +import type { BuildAliasTable } from './query-builders/select.types.ts'; +import type { SingleStoreTable } from './table.ts'; + +export function alias( // | SingleStoreViewBase + table: TTable, + alias: TAlias, +): BuildAliasTable { + return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; +} diff --git a/drizzle-orm/src/singlestore-core/columns/all.ts b/drizzle-orm/src/singlestore-core/columns/all.ts new file mode 100644 index 000000000..66d289e3f --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/all.ts @@ -0,0 +1,55 @@ +import { bigint } from './bigint.ts'; +import { binary } from './binary.ts'; +import { boolean } from './boolean.ts'; +import { char } from './char.ts'; +import { customType } from './custom.ts'; +import { date } from './date.ts'; +import { datetime } from './datetime.ts'; +import { decimal } from './decimal.ts'; +import { double } from './double.ts'; +import { singlestoreEnum } from './enum.ts'; +import { float } from './float.ts'; +import { int } from './int.ts'; +import { json } from './json.ts'; +import { mediumint } from './mediumint.ts'; +import { real } from './real.ts'; +import { serial } from './serial.ts'; +import { smallint } from './smallint.ts'; +import { text } from './text.ts'; +import { time } from './time.ts'; +import { timestamp } from './timestamp.ts'; +import { tinyint } from './tinyint.ts'; +import { varbinary } from './varbinary.ts'; +import { varchar } from './varchar.ts'; +import { year } from './year.ts'; + +export function getSingleStoreColumnBuilders() { + return { + bigint, + binary, + boolean, + char, + customType, + date, + datetime, + decimal, + double, + singlestoreEnum, + float, + int, + json, + mediumint, + real, + serial, + smallint, + text, + time, + timestamp, + tinyint, + varbinary, + varchar, + year, + }; +} + +export type SingleStoreColumnBuilders = ReturnType; diff --git a/drizzle-orm/src/singlestore-core/columns/bigint.ts b/drizzle-orm/src/singlestore-core/columns/bigint.ts new file mode 100644 index 000000000..1e6b64c49 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/bigint.ts @@ -0,0 +1,120 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreBigInt53BuilderInitial = SingleStoreBigInt53Builder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreBigInt53'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBigInt53Builder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreBigInt53Builder'; + + constructor(name: T['name'], unsigned: boolean = false) { + super(name, 'number', 'SingleStoreBigInt53'); + this.config.unsigned = unsigned; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: 
TTableName }>, + ): SingleStoreBigInt53> { + return new SingleStoreBigInt53>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBigInt53> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreBigInt53'; + + getSQLType(): string { + return `bigint${this.config.unsigned ? ' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'number') { + return value; + } + return Number(value); + } +} + +export type SingleStoreBigInt64BuilderInitial = SingleStoreBigInt64Builder<{ + name: TName; + dataType: 'bigint'; + columnType: 'SingleStoreBigInt64'; + data: bigint; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBigInt64Builder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreBigInt64Builder'; + + constructor(name: T['name'], unsigned: boolean = false) { + super(name, 'bigint', 'SingleStoreBigInt64'); + this.config.unsigned = unsigned; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBigInt64> { + return new SingleStoreBigInt64>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBigInt64> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreBigInt64'; + + getSQLType(): string { + return `bigint${this.config.unsigned ? ' unsigned' : ''}`; + } + + // eslint-disable-next-line unicorn/prefer-native-coercion-functions + override mapFromDriverValue(value: string): bigint { + return BigInt(value); + } +} + +export interface SingleStoreBigIntConfig { + mode: T; + unsigned?: boolean; +} + +export function bigint( + config: SingleStoreBigIntConfig, +): TMode extends 'number' ? SingleStoreBigInt53BuilderInitial<''> : SingleStoreBigInt64BuilderInitial<''>; +export function bigint( + name: TName, + config: SingleStoreBigIntConfig, +): TMode extends 'number' ? 
SingleStoreBigInt53BuilderInitial : SingleStoreBigInt64BuilderInitial; +export function bigint(a?: string | SingleStoreBigIntConfig, b?: SingleStoreBigIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config.mode === 'number') { + return new SingleStoreBigInt53Builder(name, config.unsigned); + } + return new SingleStoreBigInt64Builder(name, config.unsigned); +} diff --git a/drizzle-orm/src/singlestore-core/columns/binary.ts b/drizzle-orm/src/singlestore-core/columns/binary.ts new file mode 100644 index 000000000..153456447 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/binary.ts @@ -0,0 +1,70 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreBinaryBuilderInitial = SingleStoreBinaryBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreBinary'; + data: string; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBinaryBuilder> + extends SingleStoreColumnBuilder< + T, + SingleStoreBinaryConfig + > +{ + static override readonly [entityKind]: string = 'SingleStoreBinaryBuilder'; + + constructor(name: T['name'], length: number | undefined) { + super(name, 'string', 'SingleStoreBinary'); + this.config.length = length; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBinary> { + return new SingleStoreBinary>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBinary> extends SingleStoreColumn< + T, + SingleStoreBinaryConfig +> { + static override readonly [entityKind]: string = 'SingleStoreBinary'; + + length: number | undefined = this.config.length; + + getSQLType(): string { + return this.length === undefined ? 
`binary` : `binary(${this.length})`; + } +} + +export interface SingleStoreBinaryConfig { + length?: number; +} + +export function binary(): SingleStoreBinaryBuilderInitial<''>; +export function binary( + config?: SingleStoreBinaryConfig, +): SingleStoreBinaryBuilderInitial<''>; +export function binary( + name: TName, + config?: SingleStoreBinaryConfig, +): SingleStoreBinaryBuilderInitial; +export function binary(a?: string | SingleStoreBinaryConfig, b: SingleStoreBinaryConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreBinaryBuilder(name, config.length); +} diff --git a/drizzle-orm/src/singlestore-core/columns/boolean.ts b/drizzle-orm/src/singlestore-core/columns/boolean.ts new file mode 100644 index 000000000..bf48ff1da --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/boolean.ts @@ -0,0 +1,58 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreBooleanBuilderInitial = SingleStoreBooleanBuilder<{ + name: TName; + dataType: 'boolean'; + columnType: 'SingleStoreBoolean'; + data: boolean; + driverParam: number | boolean; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBooleanBuilder> + extends SingleStoreColumnBuilder +{ + static override readonly [entityKind]: string = 'SingleStoreBooleanBuilder'; + + constructor(name: T['name']) { + super(name, 'boolean', 'SingleStoreBoolean'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBoolean> { + return new SingleStoreBoolean>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBoolean> + extends SingleStoreColumn +{ + static override readonly [entityKind]: string = 'SingleStoreBoolean'; + + getSQLType(): string { + return 'boolean'; + } + + override mapFromDriverValue(value: number | boolean): boolean { + if (typeof value === 'boolean') { + return value; + } + return value === 1; + } +} + +export function boolean(): SingleStoreBooleanBuilderInitial<''>; +export function boolean(name: TName): SingleStoreBooleanBuilderInitial; +export function boolean(name?: string) { + return new SingleStoreBooleanBuilder(name ?? 
''); +} diff --git a/drizzle-orm/src/singlestore-core/columns/char.ts b/drizzle-orm/src/singlestore-core/columns/char.ts new file mode 100644 index 000000000..512460f92 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/char.ts @@ -0,0 +1,75 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreCharBuilderInitial = + SingleStoreCharBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreChar'; + data: TEnum[number]; + driverParam: number | string; + enumValues: TEnum; + generated: undefined; + }>; + +export class SingleStoreCharBuilder> + extends SingleStoreColumnBuilder< + T, + SingleStoreCharConfig + > +{ + static override readonly [entityKind]: string = 'SingleStoreCharBuilder'; + + constructor(name: T['name'], config: SingleStoreCharConfig) { + super(name, 'string', 'SingleStoreChar'); + this.config.length = config.length; + this.config.enum = config.enum; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreChar & { enumValues: T['enumValues'] }> { + return new SingleStoreChar & { enumValues: T['enumValues'] }>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreChar> + extends SingleStoreColumn> +{ + static override readonly [entityKind]: string = 'SingleStoreChar'; + + readonly length: number | undefined = this.config.length; + override readonly enumValues = this.config.enum; + + getSQLType(): string { + return this.length === undefined ? 
`char` : `char(${this.length})`; + } +} + +export interface SingleStoreCharConfig< + TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, +> { + length?: number; + enum?: TEnum; +} + +export function char(): SingleStoreCharBuilderInitial<'', [string, ...string[]]>; +export function char>( + config?: SingleStoreCharConfig>, +): SingleStoreCharBuilderInitial<'', Writable>; +export function char>( + name: TName, + config?: SingleStoreCharConfig>, +): SingleStoreCharBuilderInitial>; +export function char(a?: string | SingleStoreCharConfig, b: SingleStoreCharConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreCharBuilder(name, config as any); +} diff --git a/drizzle-orm/src/singlestore-core/columns/common.ts b/drizzle-orm/src/singlestore-core/columns/common.ts new file mode 100644 index 000000000..c0dc7fb67 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/common.ts @@ -0,0 +1,117 @@ +import type { + ColumnBuilderBase, + ColumnBuilderBaseConfig, + ColumnBuilderExtraConfig, + ColumnBuilderRuntimeConfig, + ColumnDataType, + HasDefault, + IsAutoincrement, + MakeColumnConfig, +} from '~/column-builder.ts'; +import { ColumnBuilder } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { Column } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable, SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { Update } from '~/utils.ts'; +import { uniqueKeyName } from '../unique-constraint.ts'; + +export interface SingleStoreColumnBuilderBase< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, + TTypeConfig extends object = object, +> extends ColumnBuilderBase {} + +export interface SingleStoreGeneratedColumnConfig { + mode?: 'virtual' | 'stored'; +} + +export abstract class SingleStoreColumnBuilder< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig & { + data: any; + }, + TRuntimeConfig extends object = object, + TTypeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends ColumnBuilder + implements SingleStoreColumnBuilderBase +{ + static override readonly [entityKind]: string = 'SingleStoreColumnBuilder'; + + unique(name?: string): this { + this.config.isUnique = true; + this.config.uniqueName = name; + return this; + } + + // TODO: Implement generated columns for SingleStore (https://docs.singlestore.com/cloud/create-a-database/using-persistent-computed-columns/) + /** @internal */ + generatedAlwaysAs(as: SQL | T['data'] | (() => SQL), config?: SingleStoreGeneratedColumnConfig) { + this.config.generated = { + as, + type: 'always', + mode: config?.mode ?? 'virtual', + }; + return this as any; + } + + /** @internal */ + abstract build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreColumn>; +} + +// To understand how to use `SingleStoreColumn` and `AnySingleStoreColumn`, see `Column` and `AnyColumn` documentation. 
+export abstract class SingleStoreColumn< + T extends ColumnBaseConfig = ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends Column { + static override readonly [entityKind]: string = 'SingleStoreColumn'; + + constructor( + override readonly table: SingleStoreTable, + config: ColumnBuilderRuntimeConfig, + ) { + if (!config.uniqueName) { + config.uniqueName = uniqueKeyName(table, [config.name]); + } + super(table, config); + } +} + +export type AnySingleStoreColumn> = {}> = + SingleStoreColumn< + Required, TPartial>> + >; + +export interface SingleStoreColumnWithAutoIncrementConfig { + autoIncrement: boolean; +} + +export abstract class SingleStoreColumnBuilderWithAutoIncrement< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends SingleStoreColumnBuilder { + static override readonly [entityKind]: string = 'SingleStoreColumnBuilderWithAutoIncrement'; + + constructor(name: NonNullable, dataType: T['dataType'], columnType: T['columnType']) { + super(name, dataType, columnType); + this.config.autoIncrement = false; + } + + autoincrement(): IsAutoincrement> { + this.config.autoIncrement = true; + this.config.hasDefault = true; + return this as IsAutoincrement>; + } +} + +export abstract class SingleStoreColumnWithAutoIncrement< + T extends ColumnBaseConfig = ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends SingleStoreColumn { + static override readonly [entityKind]: string = 'SingleStoreColumnWithAutoIncrement'; + + readonly autoIncrement: boolean = this.config.autoIncrement; +} diff --git a/drizzle-orm/src/singlestore-core/columns/custom.ts b/drizzle-orm/src/singlestore-core/columns/custom.ts new file mode 100644 index 000000000..964e077d7 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/custom.ts @@ -0,0 +1,235 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { SQL } from '~/sql/sql.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type ConvertCustomConfig> = + & { + name: TName; + dataType: 'custom'; + columnType: 'SingleStoreCustomColumn'; + data: T['data']; + driverParam: T['driverData']; + enumValues: undefined; + generated: undefined; + } + & (T['notNull'] extends true ? { notNull: true } : {}) + & (T['default'] extends true ? 
{ hasDefault: true } : {}); + +export interface SingleStoreCustomColumnInnerConfig { + customTypeValues: CustomTypeValues; +} + +export class SingleStoreCustomColumnBuilder> + extends SingleStoreColumnBuilder< + T, + { + fieldConfig: CustomTypeValues['config']; + customTypeParams: CustomTypeParams; + }, + { + singlestoreColumnBuilderBrand: 'SingleStoreCustomColumnBuilderBrand'; + } + > +{ + static override readonly [entityKind]: string = 'SingleStoreCustomColumnBuilder'; + + constructor( + name: T['name'], + fieldConfig: CustomTypeValues['config'], + customTypeParams: CustomTypeParams, + ) { + super(name, 'custom', 'SingleStoreCustomColumn'); + this.config.fieldConfig = fieldConfig; + this.config.customTypeParams = customTypeParams; + } + + /** @internal */ + build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreCustomColumn> { + return new SingleStoreCustomColumn>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreCustomColumn> + extends SingleStoreColumn +{ + static override readonly [entityKind]: string = 'SingleStoreCustomColumn'; + + private sqlName: string; + private mapTo?: (value: T['data']) => T['driverParam']; + private mapFrom?: (value: T['driverParam']) => T['data']; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreCustomColumnBuilder['config'], + ) { + super(table, config); + this.sqlName = config.customTypeParams.dataType(config.fieldConfig); + this.mapTo = config.customTypeParams.toDriver; + this.mapFrom = config.customTypeParams.fromDriver; + } + + getSQLType(): string { + return this.sqlName; + } + + override mapFromDriverValue(value: T['driverParam']): T['data'] { + return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; + } + + override mapToDriverValue(value: T['data']): T['driverParam'] { + return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; + } +} + +export type CustomTypeValues = { + /** + * Required type for custom column, that will infer proper type model + * + * Examples: + * + * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` + * + * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. 
Like `integer` + */ + data: unknown; + + /** + * Type helper, that represents what type database driver is accepting for specific database data type + */ + driverData?: unknown; + + /** + * What config type should be used for {@link CustomTypeParams} `dataType` generation + */ + config?: Record; + + /** + * Whether the config argument should be required or not + * @default false + */ + configRequired?: boolean; + + /** + * If your custom data type should be notNull by default you can use `notNull: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + notNull?: boolean; + + /** + * If your custom data type has default you can use `default: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + default?: boolean; +}; + +export interface CustomTypeParams { + /** + * Database data type string representation, that is used for migrations + * @example + * ``` + * `jsonb`, `text` + * ``` + * + * If database data type needs additional params you can use them from `config` param + * @example + * ``` + * `varchar(256)`, `numeric(2,3)` + * ``` + * + * To make `config` be of specific type please use config generic in {@link CustomTypeValues} + * + * @example + * Usage example + * ``` + * dataType() { + * return 'boolean'; + * }, + * ``` + * Or + * ``` + * dataType(config) { + * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; + * } + * ``` + */ + dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; + + /** + * Optional mapping function, between user input and driver + * @example + * For example, when using jsonb we need to map JS/TS object to string before writing to database + * ``` + * toDriver(value: TData): string { + * return JSON.stringify(value); + * } + * ``` + */ + toDriver?: (value: T['data']) => T['driverData'] | SQL; + + /** + * Optional mapping function, that is responsible for data mapping from database to JS/TS code + * @example + * For example, when using timestamp we need to map string Date representation to JS Date + * ``` + * fromDriver(value: string): Date { + * return new Date(value); + * }, + * ``` + */ + fromDriver?: (value: T['driverData']) => T['data']; +} + +/** + * Custom singlestore database data type generator + */ +export function customType( + customTypeParams: CustomTypeParams, +): Equal extends true ? 
{ + & T['config']>( + fieldConfig: TConfig, + ): SingleStoreCustomColumnBuilder>; + ( + dbName: TName, + fieldConfig: T['config'], + ): SingleStoreCustomColumnBuilder>; + } + : { + (): SingleStoreCustomColumnBuilder>; + & T['config']>( + fieldConfig?: TConfig, + ): SingleStoreCustomColumnBuilder>; + ( + dbName: TName, + fieldConfig?: T['config'], + ): SingleStoreCustomColumnBuilder>; + } +{ + return ( + a?: TName | T['config'], + b?: T['config'], + ): SingleStoreCustomColumnBuilder> => { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); + }; +} diff --git a/drizzle-orm/src/singlestore-core/columns/date.common.ts b/drizzle-orm/src/singlestore-core/columns/date.common.ts new file mode 100644 index 000000000..39b2507eb --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/date.common.ts @@ -0,0 +1,41 @@ +import type { + ColumnBuilderBaseConfig, + ColumnBuilderExtraConfig, + ColumnDataType, + HasDefault, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { sql } from '~/sql/sql.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export interface SingleStoreDateColumnBaseConfig { + hasOnUpdateNow: boolean; +} + +export abstract class SingleStoreDateColumnBaseBuilder< + T extends ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends SingleStoreColumnBuilder { + static override readonly [entityKind]: string = 'SingleStoreDateColumnBuilder'; + + defaultNow() { + return this.default(sql`now()`); + } + + onUpdateNow(): HasDefault { + this.config.hasOnUpdateNow = true; + this.config.hasDefault = true; + return this as HasDefault; + } +} + +export abstract class SingleStoreDateBaseColumn< + T extends ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends SingleStoreColumn { + static override readonly [entityKind]: string = 'SingleStoreDateColumn'; + + readonly hasOnUpdateNow: boolean = this.config.hasOnUpdateNow; +} diff --git a/drizzle-orm/src/singlestore-core/columns/date.ts b/drizzle-orm/src/singlestore-core/columns/date.ts new file mode 100644 index 000000000..70da74f3a --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/date.ts @@ -0,0 +1,123 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreDateBuilderInitial = SingleStoreDateBuilder<{ + name: TName; + dataType: 'date'; + columnType: 'SingleStoreDate'; + data: Date; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateBuilder> + extends SingleStoreColumnBuilder +{ + static override readonly [entityKind]: string = 'SingleStoreDateBuilder'; + + constructor(name: T['name']) { + super(name, 'date', 'SingleStoreDate'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDate> { + return new SingleStoreDate>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + 
+export class SingleStoreDate> extends SingleStoreColumn { + static override readonly [entityKind]: string = 'SingleStoreDate'; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `date`; + } + + override mapFromDriverValue(value: string): Date { + return new Date(value); + } +} + +export type SingleStoreDateStringBuilderInitial = SingleStoreDateStringBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreDateString'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateStringBuilder> + extends SingleStoreColumnBuilder +{ + static override readonly [entityKind]: string = 'SingleStoreDateStringBuilder'; + + constructor(name: T['name']) { + super(name, 'string', 'SingleStoreDateString'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDateString> { + return new SingleStoreDateString>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDateString> + extends SingleStoreColumn +{ + static override readonly [entityKind]: string = 'SingleStoreDateString'; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateStringBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `date`; + } +} + +export interface SingleStoreDateConfig { + mode?: TMode; +} + +export function date(): SingleStoreDateBuilderInitial<''>; +export function date( + config?: SingleStoreDateConfig, +): Equal extends true ? SingleStoreDateStringBuilderInitial<''> : SingleStoreDateBuilderInitial<''>; +export function date( + name: TName, + config?: SingleStoreDateConfig, +): Equal extends true ? 
SingleStoreDateStringBuilderInitial + : SingleStoreDateBuilderInitial; +export function date(a?: string | SingleStoreDateConfig, b?: SingleStoreDateConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new SingleStoreDateStringBuilder(name); + } + return new SingleStoreDateBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/datetime.ts b/drizzle-orm/src/singlestore-core/columns/datetime.ts new file mode 100644 index 000000000..16f137901 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/datetime.ts @@ -0,0 +1,153 @@ +import type { + ColumnBuilderBaseConfig, + ColumnBuilderRuntimeConfig, + GeneratedColumnConfig, + HasGenerated, + MakeColumnConfig, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { SQL } from '~/sql/index.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreDateTimeBuilderInitial = SingleStoreDateTimeBuilder<{ + name: TName; + dataType: 'date'; + columnType: 'SingleStoreDateTime'; + data: Date; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateTimeBuilder> + extends SingleStoreColumnBuilder +{ + /** @internal */ + // TODO: we need to add a proper support for SingleStore + override generatedAlwaysAs( + _as: SQL | (() => SQL) | T['data'], + _config?: Partial>, + ): HasGenerated { + throw new Error('Method not implemented.'); + } + static override readonly [entityKind]: string = 'SingleStoreDateTimeBuilder'; + + constructor(name: T['name']) { + super(name, 'date', 'SingleStoreDateTime'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDateTime> { + return new SingleStoreDateTime>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDateTime> + extends SingleStoreColumn +{ + static override readonly [entityKind]: string = 'SingleStoreDateTime'; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateTimeBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `datetime`; + } + + override mapToDriverValue(value: Date): unknown { + return value.toISOString().replace('T', ' ').replace('Z', ''); + } + + override mapFromDriverValue(value: string): Date { + return new Date(value.replace(' ', 'T') + 'Z'); + } +} + +export type SingleStoreDateTimeStringBuilderInitial = SingleStoreDateTimeStringBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreDateTimeString'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateTimeStringBuilder> + extends SingleStoreColumnBuilder +{ + /** @internal */ + // TODO: we need to add a proper support for SingleStore + override generatedAlwaysAs( + _as: SQL | (() => SQL) | T['data'], + _config?: Partial>, + ): HasGenerated { + throw new Error('Method not implemented.'); + } + static override readonly [entityKind]: string = 'SingleStoreDateTimeStringBuilder'; + + constructor(name: T['name']) { + super(name, 'string', 'SingleStoreDateTimeString'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): 
SingleStoreDateTimeString> { + return new SingleStoreDateTimeString>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDateTimeString> + extends SingleStoreColumn +{ + static override readonly [entityKind]: string = 'SingleStoreDateTimeString'; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateTimeStringBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `datetime`; + } +} + +export interface SingleStoreDatetimeConfig { + mode?: TMode; +} + +export function datetime(): SingleStoreDateTimeBuilderInitial<''>; +export function datetime( + config?: SingleStoreDatetimeConfig, +): Equal extends true ? SingleStoreDateTimeStringBuilderInitial<''> + : SingleStoreDateTimeBuilderInitial<''>; +export function datetime( + name: TName, + config?: SingleStoreDatetimeConfig, +): Equal extends true ? SingleStoreDateTimeStringBuilderInitial + : SingleStoreDateTimeBuilderInitial; +export function datetime(a?: string | SingleStoreDatetimeConfig, b?: SingleStoreDatetimeConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new SingleStoreDateTimeStringBuilder(name); + } + return new SingleStoreDateTimeBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/decimal.ts b/drizzle-orm/src/singlestore-core/columns/decimal.ts new file mode 100644 index 000000000..d0c61732c --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/decimal.ts @@ -0,0 +1,90 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreDecimalBuilderInitial = SingleStoreDecimalBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreDecimal'; + data: string; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDecimalBuilder< + T extends ColumnBuilderBaseConfig<'string', 'SingleStoreDecimal'>, +> extends SingleStoreColumnBuilderWithAutoIncrement< + T, + SingleStoreDecimalConfig +> { + static override readonly [entityKind]: string = 'SingleStoreDecimalBuilder'; + + constructor(name: T['name'], config: SingleStoreDecimalConfig | undefined) { + super(name, 'string', 'SingleStoreDecimal'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + this.config.unsigned = config?.unsigned; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDecimal> { + return new SingleStoreDecimal>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDecimal< + T extends ColumnBaseConfig<'string', 'SingleStoreDecimal'>, +> extends SingleStoreColumnWithAutoIncrement { + static override readonly [entityKind]: string = 'SingleStoreDecimal'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + readonly unsigned: boolean | undefined = this.config.unsigned; + + getSQLType(): string { + let type = ''; + if (this.precision !== undefined && this.scale !== undefined) { + type += 
`decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + type += 'decimal'; + } else { + type += `decimal(${this.precision})`; + } + type = type === 'decimal(10,0)' || type === 'decimal(10)' ? 'decimal' : type; + return this.unsigned ? `${type} unsigned` : type; + } +} + +export interface SingleStoreDecimalConfig { + precision?: number; + scale?: number; + unsigned?: boolean; +} + +export function decimal(): SingleStoreDecimalBuilderInitial<''>; +export function decimal( + config: SingleStoreDecimalConfig, +): SingleStoreDecimalBuilderInitial<''>; +export function decimal( + name: TName, + config?: SingleStoreDecimalConfig, +): SingleStoreDecimalBuilderInitial; +export function decimal( + a?: string | SingleStoreDecimalConfig, + b: SingleStoreDecimalConfig = {}, +) { + const { name, config } = getColumnNameAndConfig( + a, + b, + ); + return new SingleStoreDecimalBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/double.ts b/drizzle-orm/src/singlestore-core/columns/double.ts new file mode 100644 index 000000000..103731eab --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/double.ts @@ -0,0 +1,80 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreDoubleBuilderInitial = SingleStoreDoubleBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreDouble'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDoubleBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreDoubleBuilder'; + + constructor(name: T['name'], config: SingleStoreDoubleConfig | undefined) { + super(name, 'number', 'SingleStoreDouble'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + this.config.unsigned = config?.unsigned; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDouble> { + return new SingleStoreDouble>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDouble> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreDouble'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + readonly unsigned: boolean | undefined = this.config.unsigned; + + getSQLType(): string { + let type = ''; + if (this.precision !== undefined && this.scale !== undefined) { + type += `double(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + type += 'double'; + } else { + type += `double(${this.precision})`; + } + return this.unsigned ? 
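// `unsigned` only affects the emitted DDL type; the runtime value is still a plain JS number.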
`${type} unsigned` : type; + } +} + +export interface SingleStoreDoubleConfig { + precision?: number; + scale?: number; + unsigned?: boolean; +} + +export function double(): SingleStoreDoubleBuilderInitial<''>; +export function double( + config?: SingleStoreDoubleConfig, +): SingleStoreDoubleBuilderInitial<''>; +export function double( + name: TName, + config?: SingleStoreDoubleConfig, +): SingleStoreDoubleBuilderInitial; +export function double(a?: string | SingleStoreDoubleConfig, b?: SingleStoreDoubleConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreDoubleBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/enum.ts b/drizzle-orm/src/singlestore-core/columns/enum.ts new file mode 100644 index 000000000..98f3d78e3 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/enum.ts @@ -0,0 +1,84 @@ +import type { + ColumnBuilderBaseConfig, + ColumnBuilderRuntimeConfig, + GeneratedColumnConfig, + HasGenerated, + MakeColumnConfig, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { SQL } from '~/sql/index.ts'; +import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreEnumColumnBuilderInitial = + SingleStoreEnumColumnBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreEnumColumn'; + data: TEnum[number]; + driverParam: string; + enumValues: TEnum; + generated: undefined; + }>; + +export class SingleStoreEnumColumnBuilder> + extends SingleStoreColumnBuilder +{ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + override generatedAlwaysAs( + as: SQL | (() => SQL) | T['data'], + config?: Partial>, + ): HasGenerated { + throw new Error('Method not implemented.'); + } + static override readonly [entityKind]: string = 'SingleStoreEnumColumnBuilder'; + + constructor(name: T['name'], values: T['enumValues']) { + super(name, 'string', 'SingleStoreEnumColumn'); + this.config.enumValues = values; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreEnumColumn & { enumValues: T['enumValues'] }> { + return new SingleStoreEnumColumn & { enumValues: T['enumValues'] }>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreEnumColumn> + extends SingleStoreColumn +{ + static override readonly [entityKind]: string = 'SingleStoreEnumColumn'; + + override readonly enumValues = this.config.enumValues; + + getSQLType(): string { + return `enum(${this.enumValues!.map((value) => `'${value}'`).join(',')})`; + } +} + +export function singlestoreEnum>( + values: T | Writable, +): SingleStoreEnumColumnBuilderInitial<'', Writable>; +export function singlestoreEnum>( + name: TName, + values: T | Writable, +): SingleStoreEnumColumnBuilderInitial>; +export function singlestoreEnum( + a?: string | readonly [string, ...string[]] | [string, ...string[]], + b?: readonly [string, ...string[]] | [string, ...string[]], +): any { + const { name, config: values } = getColumnNameAndConfig(a, b); + + if (values.length === 0) { + throw new Error(`You have an empty array for "${name}" enum values`); + } + + return new SingleStoreEnumColumnBuilder(name, values as any); +} diff --git a/drizzle-orm/src/singlestore-core/columns/float.ts 
b/drizzle-orm/src/singlestore-core/columns/float.ts new file mode 100644 index 000000000..9cfed6131 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/float.ts @@ -0,0 +1,80 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreFloatBuilderInitial = SingleStoreFloatBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreFloat'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreFloatBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreFloatBuilder'; + + constructor(name: T['name'], config: SingleStoreFloatConfig | undefined) { + super(name, 'number', 'SingleStoreFloat'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + this.config.unsigned = config?.unsigned; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreFloat> { + return new SingleStoreFloat>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreFloat> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreFloat'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + readonly unsigned: boolean | undefined = this.config.unsigned; + + getSQLType(): string { + let type = ''; + if (this.precision !== undefined && this.scale !== undefined) { + type += `float(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + type += 'float'; + } else { + type += `float(${this.precision},0)`; + } + return this.unsigned ? 
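// Note: unlike double(), a precision given without a scale emits "float(p,0)" rather than "float(p)".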
`${type} unsigned` : type; + } +} + +export interface SingleStoreFloatConfig { + precision?: number; + scale?: number; + unsigned?: boolean; +} + +export function float(): SingleStoreFloatBuilderInitial<''>; +export function float( + config?: SingleStoreFloatConfig, +): SingleStoreFloatBuilderInitial<''>; +export function float( + name: TName, + config?: SingleStoreFloatConfig, +): SingleStoreFloatBuilderInitial; +export function float(a?: string | SingleStoreFloatConfig, b?: SingleStoreFloatConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreFloatBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/index.ts b/drizzle-orm/src/singlestore-core/columns/index.ts new file mode 100644 index 000000000..b51f0fac4 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/index.ts @@ -0,0 +1,25 @@ +export * from './bigint.ts'; +export * from './binary.ts'; +export * from './boolean.ts'; +export * from './char.ts'; +export * from './common.ts'; +export * from './custom.ts'; +export * from './date.ts'; +export * from './datetime.ts'; +export * from './decimal.ts'; +export * from './double.ts'; +export * from './enum.ts'; +export * from './float.ts'; +export * from './int.ts'; +export * from './json.ts'; +export * from './mediumint.ts'; +export * from './real.ts'; +export * from './serial.ts'; +export * from './smallint.ts'; +export * from './text.ts'; +export * from './time.ts'; +export * from './timestamp.ts'; +export * from './tinyint.ts'; +export * from './varbinary.ts'; +export * from './varchar.ts'; +export * from './year.ts'; diff --git a/drizzle-orm/src/singlestore-core/columns/int.ts b/drizzle-orm/src/singlestore-core/columns/int.ts new file mode 100644 index 000000000..b6a661f66 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/int.ts @@ -0,0 +1,71 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreIntBuilderInitial = SingleStoreIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreInt'); + this.config.unsigned = config ? config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreInt> { + return new SingleStoreInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreInt> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreInt'; + + getSQLType(): string { + return `int${this.config.unsigned ? 
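// Usage sketch (illustrative, not part of this diff; assumes the `singlestoreTable()` helper and the
// `.autoincrement()` builder method mirroring the MySQL dialect):
//
//   const counters = singlestoreTable('counters', {
//     id: int('id', { unsigned: true }).autoincrement().primaryKey(),
//     value: int('value').notNull().default(0),
//   });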
' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export interface SingleStoreIntConfig { + unsigned?: boolean; +} + +export function int(): SingleStoreIntBuilderInitial<''>; +export function int( + config?: SingleStoreIntConfig, +): SingleStoreIntBuilderInitial<''>; +export function int( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreIntBuilderInitial; +export function int(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/json.ts b/drizzle-orm/src/singlestore-core/columns/json.ts new file mode 100644 index 000000000..97ff759d1 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/json.ts @@ -0,0 +1,53 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreJsonBuilderInitial = SingleStoreJsonBuilder<{ + name: TName; + dataType: 'json'; + columnType: 'SingleStoreJson'; + data: unknown; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreJsonBuilder> + extends SingleStoreColumnBuilder +{ + static override readonly [entityKind]: string = 'SingleStoreJsonBuilder'; + + constructor(name: T['name']) { + super(name, 'json', 'SingleStoreJson'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreJson> { + return new SingleStoreJson>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreJson> extends SingleStoreColumn { + static override readonly [entityKind]: string = 'SingleStoreJson'; + + getSQLType(): string { + return 'json'; + } + + override mapToDriverValue(value: T['data']): string { + return JSON.stringify(value); + } +} + +export function json(): SingleStoreJsonBuilderInitial<''>; +export function json(name: TName): SingleStoreJsonBuilderInitial; +export function json(name?: string) { + return new SingleStoreJsonBuilder(name ?? 
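// Values are serialized with JSON.stringify() on write; `.$type<...>()` can be chained to narrow the inferred
// TypeScript type, e.g. (illustrative): profile: json('profile').$type<{ theme: string; tags: string[] }>(),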
''); +} diff --git a/drizzle-orm/src/singlestore-core/columns/mediumint.ts b/drizzle-orm/src/singlestore-core/columns/mediumint.ts new file mode 100644 index 000000000..4a5fa80f9 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/mediumint.ts @@ -0,0 +1,68 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; +import type { SingleStoreIntConfig } from './int.ts'; + +export type SingleStoreMediumIntBuilderInitial = SingleStoreMediumIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreMediumInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreMediumIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreMediumIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreMediumInt'); + this.config.unsigned = config ? config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreMediumInt> { + return new SingleStoreMediumInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreMediumInt> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreMediumInt'; + + getSQLType(): string { + return `mediumint${this.config.unsigned ? 
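// Same numeric coercion as int(): string values returned by the driver are converted with Number().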
' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function mediumint(): SingleStoreMediumIntBuilderInitial<''>; +export function mediumint( + config?: SingleStoreIntConfig, +): SingleStoreMediumIntBuilderInitial<''>; +export function mediumint( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreMediumIntBuilderInitial; +export function mediumint(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreMediumIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/real.ts b/drizzle-orm/src/singlestore-core/columns/real.ts new file mode 100644 index 000000000..53d15345c --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/real.ts @@ -0,0 +1,81 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreRealBuilderInitial = SingleStoreRealBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreReal'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreRealBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement< + T, + SingleStoreRealConfig + > +{ + static override readonly [entityKind]: string = 'SingleStoreRealBuilder'; + + constructor(name: T['name'], config: SingleStoreRealConfig | undefined) { + super(name, 'number', 'SingleStoreReal'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreReal> { + return new SingleStoreReal>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreReal> + extends SingleStoreColumnWithAutoIncrement< + T, + SingleStoreRealConfig + > +{ + static override readonly [entityKind]: string = 'SingleStoreReal'; + + precision: number | undefined = this.config.precision; + scale: number | undefined = this.config.scale; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `real(${this.precision}, ${this.scale})`; + } else if (this.precision === undefined) { + return 'real'; + } else { + return `real(${this.precision})`; + } + } +} + +export interface SingleStoreRealConfig { + precision?: number; + scale?: number; +} + +export function real(): SingleStoreRealBuilderInitial<''>; +export function real( + config?: SingleStoreRealConfig, +): SingleStoreRealBuilderInitial<''>; +export function real( + name: TName, + config?: SingleStoreRealConfig, +): SingleStoreRealBuilderInitial; +export function real(a?: string | SingleStoreRealConfig, b: SingleStoreRealConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreRealBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/serial.ts b/drizzle-orm/src/singlestore-core/columns/serial.ts new file mode 100644 index 000000000..df415d47e --- /dev/null +++ 
b/drizzle-orm/src/singlestore-core/columns/serial.ts @@ -0,0 +1,76 @@ +import type { + ColumnBuilderBaseConfig, + ColumnBuilderRuntimeConfig, + HasDefault, + IsAutoincrement, + IsPrimaryKey, + MakeColumnConfig, + NotNull, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreSerialBuilderInitial = IsAutoincrement< + IsPrimaryKey< + NotNull< + HasDefault< + SingleStoreSerialBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + enumValues: undefined; + generated: undefined; + }> + > + > + > +>; + +export class SingleStoreSerialBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreSerialBuilder'; + + constructor(name: T['name']) { + super(name, 'number', 'SingleStoreSerial'); + this.config.hasDefault = true; + this.config.autoIncrement = true; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreSerial> { + return new SingleStoreSerial>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreSerial< + T extends ColumnBaseConfig<'number', 'SingleStoreSerial'>, +> extends SingleStoreColumnWithAutoIncrement { + static override readonly [entityKind]: string = 'SingleStoreSerial'; + + getSQLType(): string { + return 'serial'; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function serial(): SingleStoreSerialBuilderInitial<''>; +export function serial(name: TName): SingleStoreSerialBuilderInitial; +export function serial(name?: string) { + return new SingleStoreSerialBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/singlestore-core/columns/smallint.ts b/drizzle-orm/src/singlestore-core/columns/smallint.ts new file mode 100644 index 000000000..3f504b68c --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/smallint.ts @@ -0,0 +1,68 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; +import type { SingleStoreIntConfig } from './int.ts'; + +export type SingleStoreSmallIntBuilderInitial = SingleStoreSmallIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreSmallInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreSmallIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreSmallIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreSmallInt'); + this.config.unsigned = config ? 
config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreSmallInt> { + return new SingleStoreSmallInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreSmallInt> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreSmallInt'; + + getSQLType(): string { + return `smallint${this.config.unsigned ? ' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function smallint(): SingleStoreSmallIntBuilderInitial<''>; +export function smallint( + config?: SingleStoreIntConfig, +): SingleStoreSmallIntBuilderInitial<''>; +export function smallint( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreSmallIntBuilderInitial; +export function smallint(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreSmallIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/text.ts b/drizzle-orm/src/singlestore-core/columns/text.ts new file mode 100644 index 000000000..425da550f --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/text.ts @@ -0,0 +1,116 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreTextColumnType = 'tinytext' | 'text' | 'mediumtext' | 'longtext'; + +export type SingleStoreTextBuilderInitial = + SingleStoreTextBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreText'; + data: TEnum[number]; + driverParam: string; + enumValues: TEnum; + generated: undefined; + }>; + +export class SingleStoreTextBuilder> + extends SingleStoreColumnBuilder< + T, + { textType: SingleStoreTextColumnType; enumValues: T['enumValues'] } + > +{ + static override readonly [entityKind]: string = 'SingleStoreTextBuilder'; + + constructor(name: T['name'], textType: SingleStoreTextColumnType, config: SingleStoreTextConfig) { + super(name, 'string', 'SingleStoreText'); + this.config.textType = textType; + this.config.enumValues = config.enum; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreText> { + return new SingleStoreText>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreText> + extends SingleStoreColumn +{ + static override readonly [entityKind]: string = 'SingleStoreText'; + + private textType: SingleStoreTextColumnType = this.config.textType; + + override readonly enumValues = this.config.enumValues; + + getSQLType(): string { + return this.textType; + } +} + +export interface SingleStoreTextConfig< + TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, +> { + enum?: TEnum; +} + +export function text(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; +export function text>( + config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial<'', Writable>; +export function text>( + name: TName, + 
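// Note: the `enum` option only narrows the inferred TypeScript type via `enumValues`; it does not add a
// database-level constraint. Usage sketch (illustrative, not part of this diff):
//
//   status: text('status', { enum: ['draft', 'published'] }), // typed as 'draft' | 'published'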
config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial>; +export function text(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreTextBuilder(name, 'text', config as any); +} + +export function tinytext(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; +export function tinytext>( + config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial<'', Writable>; +export function tinytext>( + name: TName, + config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial>; +export function tinytext(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreTextBuilder(name, 'tinytext', config as any); +} + +export function mediumtext(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; +export function mediumtext>( + config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial<'', Writable>; +export function mediumtext>( + name: TName, + config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial>; +export function mediumtext(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreTextBuilder(name, 'mediumtext', config as any); +} + +export function longtext(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; +export function longtext>( + config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial<'', Writable>; +export function longtext>( + name: TName, + config?: SingleStoreTextConfig>, +): SingleStoreTextBuilderInitial>; +export function longtext(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreTextBuilder(name, 'longtext', config as any); +} diff --git a/drizzle-orm/src/singlestore-core/columns/time.ts b/drizzle-orm/src/singlestore-core/columns/time.ts new file mode 100644 index 000000000..b7605a4ee --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/time.ts @@ -0,0 +1,55 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreTimeBuilderInitial = SingleStoreTimeBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreTime'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreTimeBuilder> + extends SingleStoreColumnBuilder< + T + > +{ + static override readonly [entityKind]: string = 'SingleStoreTimeBuilder'; + + constructor( + name: T['name'], + ) { + super(name, 'string', 'SingleStoreTime'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTime> { + return new SingleStoreTime>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreTime< + T extends ColumnBaseConfig<'string', 'SingleStoreTime'>, +> extends SingleStoreColumn { + static override readonly [entityKind]: string = 'SingleStoreTime'; + + getSQLType(): string { + return `time`; + } +} + +export function time(): 
SingleStoreTimeBuilderInitial<''>; +export function time(name: TName): SingleStoreTimeBuilderInitial; +export function time(name?: string) { + return new SingleStoreTimeBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/singlestore-core/columns/timestamp.ts b/drizzle-orm/src/singlestore-core/columns/timestamp.ts new file mode 100644 index 000000000..ea7342fd7 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/timestamp.ts @@ -0,0 +1,125 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { sql } from '~/sql/sql.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreDateBaseColumn, SingleStoreDateColumnBaseBuilder } from './date.common.ts'; + +export type SingleStoreTimestampBuilderInitial = SingleStoreTimestampBuilder<{ + name: TName; + dataType: 'date'; + columnType: 'SingleStoreTimestamp'; + data: Date; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreTimestampBuilder> + extends SingleStoreDateColumnBaseBuilder +{ + static override readonly [entityKind]: string = 'SingleStoreTimestampBuilder'; + + constructor(name: T['name']) { + super(name, 'date', 'SingleStoreTimestamp'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTimestamp> { + return new SingleStoreTimestamp>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } + + override defaultNow() { + return this.default(sql`CURRENT_TIMESTAMP`); + } +} + +export class SingleStoreTimestamp> + extends SingleStoreDateBaseColumn +{ + static override readonly [entityKind]: string = 'SingleStoreTimestamp'; + + getSQLType(): string { + return `timestamp`; + } + + override mapFromDriverValue(value: string): Date { + return new Date(value + '+0000'); + } + + override mapToDriverValue(value: Date): string { + return value.toISOString().slice(0, -1).replace('T', ' '); + } +} + +export type SingleStoreTimestampStringBuilderInitial = SingleStoreTimestampStringBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreTimestampString'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreTimestampStringBuilder< + T extends ColumnBuilderBaseConfig<'string', 'SingleStoreTimestampString'>, +> extends SingleStoreDateColumnBaseBuilder { + static override readonly [entityKind]: string = 'SingleStoreTimestampStringBuilder'; + + constructor(name: T['name']) { + super(name, 'string', 'SingleStoreTimestampString'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTimestampString> { + return new SingleStoreTimestampString>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } + + override defaultNow() { + return this.default(sql`CURRENT_TIMESTAMP`); + } +} + +export class SingleStoreTimestampString> + extends SingleStoreDateBaseColumn +{ + static override readonly [entityKind]: string = 'SingleStoreTimestampString'; + + getSQLType(): string { + return `timestamp`; + } +} + +export interface SingleStoreTimestampConfig { + mode?: TMode; +} + +export function timestamp(): SingleStoreTimestampBuilderInitial<''>; +export function timestamp( + config?: SingleStoreTimestampConfig, +): 
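// Usage sketch (illustrative, not part of this diff; assumes the `singlestoreTable()` helper):
//
//   createdAt: timestamp('created_at').defaultNow(),        // DEFAULT CURRENT_TIMESTAMP
//   updatedAt: timestamp('updated_at', { mode: 'string' }), // read/written as a string instead of a Date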
Equal extends true ? SingleStoreTimestampStringBuilderInitial<''> + : SingleStoreTimestampBuilderInitial<''>; +export function timestamp( + name: TName, + config?: SingleStoreTimestampConfig, +): Equal extends true ? SingleStoreTimestampStringBuilderInitial + : SingleStoreTimestampBuilderInitial; +export function timestamp(a?: string | SingleStoreTimestampConfig, b: SingleStoreTimestampConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new SingleStoreTimestampStringBuilder(name); + } + return new SingleStoreTimestampBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/tinyint.ts b/drizzle-orm/src/singlestore-core/columns/tinyint.ts new file mode 100644 index 000000000..090619a6d --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/tinyint.ts @@ -0,0 +1,68 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; +import type { SingleStoreIntConfig } from './int.ts'; + +export type SingleStoreTinyIntBuilderInitial = SingleStoreTinyIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreTinyInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreTinyIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreTinyIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreTinyInt'); + this.config.unsigned = config ? config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTinyInt> { + return new SingleStoreTinyInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreTinyInt> + extends SingleStoreColumnWithAutoIncrement +{ + static override readonly [entityKind]: string = 'SingleStoreTinyInt'; + + getSQLType(): string { + return `tinyint${this.config.unsigned ? 
' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function tinyint(): SingleStoreTinyIntBuilderInitial<''>; +export function tinyint( + config?: SingleStoreIntConfig, +): SingleStoreTinyIntBuilderInitial<''>; +export function tinyint( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreTinyIntBuilderInitial; +export function tinyint(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreTinyIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/varbinary.ts b/drizzle-orm/src/singlestore-core/columns/varbinary.ts new file mode 100644 index 000000000..c55aa8071 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/varbinary.ts @@ -0,0 +1,66 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreVarBinaryBuilderInitial = SingleStoreVarBinaryBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreVarBinary'; + data: string; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreVarBinaryBuilder> + extends SingleStoreColumnBuilder +{ + static override readonly [entityKind]: string = 'SingleStoreVarBinaryBuilder'; + + /** @internal */ + constructor(name: T['name'], config: SingleStoreVarbinaryOptions) { + super(name, 'string', 'SingleStoreVarBinary'); + this.config.length = config?.length; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreVarBinary> { + return new SingleStoreVarBinary>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreVarBinary< + T extends ColumnBaseConfig<'string', 'SingleStoreVarBinary'>, +> extends SingleStoreColumn { + static override readonly [entityKind]: string = 'SingleStoreVarBinary'; + + length: number | undefined = this.config.length; + + getSQLType(): string { + return this.length === undefined ? 
`varbinary` : `varbinary(${this.length})`; + } +} + +export interface SingleStoreVarbinaryOptions { + length: number; +} + +export function varbinary( + config: SingleStoreVarbinaryOptions, +): SingleStoreVarBinaryBuilderInitial<''>; +export function varbinary( + name: TName, + config: SingleStoreVarbinaryOptions, +): SingleStoreVarBinaryBuilderInitial; +export function varbinary(a?: string | SingleStoreVarbinaryOptions, b?: SingleStoreVarbinaryOptions) { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreVarBinaryBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/varchar.ts b/drizzle-orm/src/singlestore-core/columns/varchar.ts new file mode 100644 index 000000000..2c39491d7 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/varchar.ts @@ -0,0 +1,75 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreVarCharBuilderInitial = + SingleStoreVarCharBuilder< + { + name: TName; + dataType: 'string'; + columnType: 'SingleStoreVarChar'; + data: TEnum[number]; + driverParam: number | string; + enumValues: TEnum; + generated: undefined; + } + >; + +export class SingleStoreVarCharBuilder> + extends SingleStoreColumnBuilder> +{ + static override readonly [entityKind]: string = 'SingleStoreVarCharBuilder'; + + /** @internal */ + constructor(name: T['name'], config: SingleStoreVarCharConfig) { + super(name, 'string', 'SingleStoreVarChar'); + this.config.length = config.length; + this.config.enum = config.enum; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreVarChar & { enumValues: T['enumValues'] }> { + return new SingleStoreVarChar & { enumValues: T['enumValues'] }>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreVarChar> + extends SingleStoreColumn> +{ + static override readonly [entityKind]: string = 'SingleStoreVarChar'; + + readonly length: number | undefined = this.config.length; + + override readonly enumValues = this.config.enum; + + getSQLType(): string { + return this.length === undefined ? 
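// `length` is required by the public varchar() overloads. Usage sketch (illustrative, not part of this diff):
//
//   email: varchar('email', { length: 255 }).notNull(),
//   role: varchar('role', { length: 16, enum: ['admin', 'user'] }), // enum only narrows the TS type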
`varchar` : `varchar(${this.length})`; + } +} + +export interface SingleStoreVarCharConfig< + TEnum extends string[] | readonly string[] | undefined = string[] | readonly string[] | undefined, +> { + length: number; + enum?: TEnum; +} + +export function varchar>( + config: SingleStoreVarCharConfig>, +): SingleStoreVarCharBuilderInitial<'', Writable>; +export function varchar>( + name: TName, + config: SingleStoreVarCharConfig>, +): SingleStoreVarCharBuilderInitial>; +export function varchar(a?: string | SingleStoreVarCharConfig, b?: SingleStoreVarCharConfig): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new SingleStoreVarCharBuilder(name, config as any); +} diff --git a/drizzle-orm/src/singlestore-core/columns/year.ts b/drizzle-orm/src/singlestore-core/columns/year.ts new file mode 100644 index 000000000..37f3d55a3 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/year.ts @@ -0,0 +1,51 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreYearBuilderInitial = SingleStoreYearBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreYear'; + data: number; + driverParam: number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreYearBuilder> + extends SingleStoreColumnBuilder +{ + static override readonly [entityKind]: string = 'SingleStoreYearBuilder'; + + constructor(name: T['name']) { + super(name, 'number', 'SingleStoreYear'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreYear> { + return new SingleStoreYear>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreYear< + T extends ColumnBaseConfig<'number', 'SingleStoreYear'>, +> extends SingleStoreColumn { + static override readonly [entityKind]: string = 'SingleStoreYear'; + + getSQLType(): string { + return `year`; + } +} + +export function year(): SingleStoreYearBuilderInitial<''>; +export function year(name: TName): SingleStoreYearBuilderInitial; +export function year(name?: string) { + return new SingleStoreYearBuilder(name ?? 
''); +} diff --git a/drizzle-orm/src/singlestore-core/db.ts b/drizzle-orm/src/singlestore-core/db.ts new file mode 100644 index 000000000..1d64448da --- /dev/null +++ b/drizzle-orm/src/singlestore-core/db.ts @@ -0,0 +1,516 @@ +import type { ResultSetHeader } from 'mysql2/promise'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { SingleStoreDriverDatabase } from '~/singlestore/driver.ts'; +import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import type { SingleStoreDialect } from './dialect.ts'; +import { SingleStoreCountBuilder } from './query-builders/count.ts'; +import { + QueryBuilder, + SingleStoreDeleteBase, + SingleStoreInsertBuilder, + SingleStoreSelectBuilder, + SingleStoreUpdateBuilder, +} from './query-builders/index.ts'; +import type { SelectedFields } from './query-builders/select.types.ts'; +import type { + PreparedQueryHKTBase, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, + SingleStoreTransaction, + SingleStoreTransactionConfig, +} from './session.ts'; +import type { WithSubqueryWithSelection } from './subquery.ts'; +import type { SingleStoreTable } from './table.ts'; + +export class SingleStoreDatabase< + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record = {}, + TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, +> { + static readonly [entityKind]: string = 'SingleStoreDatabase'; + + declare readonly _: { + readonly schema: TSchema | undefined; + readonly fullSchema: TFullSchema; + readonly tableNamesMap: Record; + }; + + // We are waiting for SingleStore support for the `json_array` function + /** @internal */ + query: unknown; + + constructor( + /** @internal */ + readonly dialect: SingleStoreDialect, + /** @internal */ + readonly session: SingleStoreSession, + schema: RelationalSchemaConfig | undefined, + ) { + this._ = schema + ? { + schema: schema.schema, + fullSchema: schema.fullSchema as TFullSchema, + tableNamesMap: schema.tableNamesMap, + } + : { + schema: undefined, + fullSchema: {} as TFullSchema, + tableNamesMap: {}, + }; + this.query = {} as typeof this['query']; + // this.queryNotSupported = true; + // if (this._.schema) { + // for (const [tableName, columns] of Object.entries(this._.schema)) { + // (this.query as SingleStoreDatabase>['query'])[tableName] = + // new RelationalQueryBuilder( + // schema!.fullSchema, + // this._.schema, + // this._.tableNamesMap, + // schema!.fullSchema[tableName] as SingleStoreTable, + // columns, + // dialect, + // session, + // ); + // } + // } + } + + /** + * Creates a subquery that defines a temporary named result set as a CTE. + * + * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param alias The alias for the subquery. + * + * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries.
+ * + * @example + * + * ```ts + * // Create a subquery with alias 'sq' and use it in the select query + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * const result = await db.with(sq).select().from(sq); + * ``` + * + * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: + * + * ```ts + * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query + * const sq = db.$with('sq').as(db.select({ + * name: sql`upper(${users.name})`.as('name'), + * }) + * .from(users)); + * + * const result = await db.with(sq).select({ name: sq.name }).from(sq); + * ``` + */ + $with(alias: TAlias) { + const self = this; + return { + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder(self.dialect)); + } + + return new Proxy( + new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as WithSubqueryWithSelection; + }, + }; + } + + $count( + source: SingleStoreTable | SQL | SQLWrapper, // SingleStoreViewBase | + filters?: SQL, + ) { + return new SingleStoreCountBuilder({ source, filters, session: this.session }); + } + + /** + * Incorporates a previously defined CTE (using `$with`) into the main query. + * + * This method allows the main query to reference a temporary named result set. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param queries The CTEs to incorporate into the main query. + * + * @example + * + * ```ts + * // Define a subquery 'sq' as a CTE using $with + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * // Incorporate the CTE 'sq' into the main query and select from it + * const result = await db.with(sq).select().from(sq); + * ``` + */ + with(...queries: WithSubquery[]) { + const self = this; + + /** + * Creates a select query. + * + * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select} + * + * @param fields The selection object. + * + * @example + * + * ```ts + * // Select all columns and all rows from the 'cars' table + * const allCars: Car[] = await db.select().from(cars); + * + * // Select specific columns and all rows from the 'cars' table + * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ + * id: cars.id, + * brand: cars.brand + * }) + * .from(cars); + * ``` + * + * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: + * + * ```ts + * // Select specific columns along with expression and all rows from the 'cars' table + * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ + * id: cars.id, + * lowerBrand: sql`lower(${cars.brand})`, + * }) + * .from(cars); + * ``` + */ + function select(): SingleStoreSelectBuilder; + function select( + fields: TSelection, + ): SingleStoreSelectBuilder; + function select(fields?: SelectedFields): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? 
undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + }); + } + + /** + * Adds `distinct` expression to the select query. + * + * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param fields The selection object. + * + * @example + * ```ts + * // Select all unique rows from the 'cars' table + * await db.selectDistinct() + * .from(cars) + * .orderBy(cars.id, cars.brand, cars.color); + * + * // Select all unique brands from the 'cars' table + * await db.selectDistinct({ brand: cars.brand }) + * .from(cars) + * .orderBy(cars.brand); + * ``` + */ + function selectDistinct(): SingleStoreSelectBuilder; + function selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + function selectDistinct( + fields?: SelectedFields, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + distinct: true, + }); + } + + /** + * Creates an update query. + * + * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. + * + * Use `.set()` method to specify which values to update. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param table The table to update. + * + * @example + * + * ```ts + * // Update all rows in the 'cars' table + * await db.update(cars).set({ color: 'red' }); + * + * // Update rows with filters and conditions + * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); + * ``` + */ + function update( + table: TTable, + ): SingleStoreUpdateBuilder { + return new SingleStoreUpdateBuilder(table, self.session, self.dialect, queries); + } + + /** + * Creates a delete query. + * + * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param table The table to delete from. + * + * @example + * + * ```ts + * // Delete all rows in the 'cars' table + * await db.delete(cars); + * + * // Delete rows with filters and conditions + * await db.delete(cars).where(eq(cars.color, 'green')); + * ``` + */ + function delete_( + table: TTable, + ): SingleStoreDeleteBase { + return new SingleStoreDeleteBase(table, self.session, self.dialect, queries); + } + + return { select, selectDistinct, update, delete: delete_ }; + } + + /** + * Creates a select query. + * + * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select} + * + * @param fields The selection object. 
+ * + * @example + * + * ```ts + * // Select all columns and all rows from the 'cars' table + * const allCars: Car[] = await db.select().from(cars); + * + * // Select specific columns and all rows from the 'cars' table + * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ + * id: cars.id, + * brand: cars.brand + * }) + * .from(cars); + * ``` + * + * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: + * + * ```ts + * // Select specific columns along with expression and all rows from the 'cars' table + * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ + * id: cars.id, + * lowerBrand: sql`lower(${cars.brand})`, + * }) + * .from(cars); + * ``` + */ + select(): SingleStoreSelectBuilder; + select( + fields: TSelection, + ): SingleStoreSelectBuilder; + select(fields?: SelectedFields): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect }); + } + + /** + * Adds `distinct` expression to the select query. + * + * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param fields The selection object. + * + * @example + * ```ts + * // Select all unique rows from the 'cars' table + * await db.selectDistinct() + * .from(cars) + * .orderBy(cars.id, cars.brand, cars.color); + * + * // Select all unique brands from the 'cars' table + * await db.selectDistinct({ brand: cars.brand }) + * .from(cars) + * .orderBy(cars.brand); + * ``` + */ + selectDistinct(): SingleStoreSelectBuilder; + selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + selectDistinct(fields?: SelectedFields): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: this.session, + dialect: this.dialect, + distinct: true, + }); + } + + /** + * Creates an update query. + * + * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. + * + * Use `.set()` method to specify which values to update. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param table The table to update. + * + * @example + * + * ```ts + * // Update all rows in the 'cars' table + * await db.update(cars).set({ color: 'red' }); + * + * // Update rows with filters and conditions + * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); + * ``` + */ + update( + table: TTable, + ): SingleStoreUpdateBuilder { + return new SingleStoreUpdateBuilder(table, this.session, this.dialect); + } + + /** + * Creates an insert query. + * + * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. + * + * See docs: {@link https://orm.drizzle.team/docs/insert} + * + * @param table The table to insert into. + * + * @example + * + * ```ts + * // Insert one row + * await db.insert(cars).values({ brand: 'BMW' }); + * + * // Insert multiple rows + * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); + * ``` + */ + insert( + table: TTable, + ): SingleStoreInsertBuilder { + return new SingleStoreInsertBuilder(table, this.session, this.dialect); + } + + /** + * Creates a delete query. 
+ * + * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param table The table to delete from. + * + * @example + * + * ```ts + * // Delete all rows in the 'cars' table + * await db.delete(cars); + * + * // Delete rows with filters and conditions + * await db.delete(cars).where(eq(cars.color, 'green')); + * ``` + */ + delete( + table: TTable, + ): SingleStoreDeleteBase { + return new SingleStoreDeleteBase(table, this.session, this.dialect); + } + + execute( + query: SQLWrapper | string, + ): Promise> { + return this.session.execute(typeof query === 'string' ? sql.raw(query) : query.getSQL()); + } + + transaction( + transaction: ( + tx: SingleStoreTransaction, + config?: SingleStoreTransactionConfig, + ) => Promise, + config?: SingleStoreTransactionConfig, + ): Promise { + return this.session.transaction(transaction, config); + } +} + +export type SingleStoreWithReplicas = Q & { $primary: Q }; + +export const withReplicas = < + Q extends SingleStoreDriverDatabase, +>( + primary: Q, + replicas: [Q, ...Q[]], + getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, +): SingleStoreWithReplicas => { + const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); + const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); + const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); + + const update: Q['update'] = (...args: [any]) => primary.update(...args); + const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); + const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); + const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); + const transaction: Q['transaction'] = (...args: [any, any]) => primary.transaction(...args); + + return { + ...primary, + update, + insert, + delete: $delete, + execute, + transaction, + $primary: primary, + select, + selectDistinct, + with: $with, + get query() { + return getReplica(replicas).query; + }, + }; +}; diff --git a/drizzle-orm/src/singlestore-core/dialect.ts b/drizzle-orm/src/singlestore-core/dialect.ts new file mode 100644 index 000000000..99a485ac6 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/dialect.ts @@ -0,0 +1,813 @@ +import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; +import { CasingCache } from '~/casing.ts'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import { DrizzleError } from '~/errors.ts'; +import { and, eq } from '~/expressions.ts'; +import type { MigrationConfig, MigrationMeta } from '~/migrator.ts'; +import { + type BuildRelationalQueryResult, + type DBQueryConfig, + getOperators, + getOrderByOperators, + Many, + normalizeRelation, + One, + type Relation, + type TableRelationalConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import type { Name, Placeholder, QueryWithTypings, SQLChunk } from '~/sql/sql.ts'; +import { Param, SQL, sql, View } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { getTableName, getTableUniqueName, Table } from '~/table.ts'; +import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import { SingleStoreColumn } from './columns/common.ts'; +import 
type { SingleStoreDeleteConfig } from './query-builders/delete.ts'; +import type { SingleStoreInsertConfig } from './query-builders/insert.ts'; +import type { + SelectedFieldsOrdered, + SingleStoreSelectConfig, + SingleStoreSelectJoinConfig, +} from './query-builders/select.types.ts'; +import type { SingleStoreUpdateConfig } from './query-builders/update.ts'; +import type { SingleStoreSession } from './session.ts'; +import { SingleStoreTable } from './table.ts'; +/* import { SingleStoreViewBase } from './view-base.ts'; */ + +export interface SingleStoreDialectConfig { + casing?: Casing; +} + +export class SingleStoreDialect { + static readonly [entityKind]: string = 'SingleStoreDialect'; + + /** @internal */ + readonly casing: CasingCache; + + constructor(config?: SingleStoreDialectConfig) { + this.casing = new CasingCache(config?.casing); + } + + async migrate( + migrations: MigrationMeta[], + session: SingleStoreSession, + config: Omit, + ): Promise { + const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; + const migrationTableCreate = sql` + create table if not exists ${sql.identifier(migrationsTable)} ( + id serial primary key, + hash text not null, + created_at bigint + ) + `; + await session.execute(migrationTableCreate); + + const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( + sql`select id, hash, created_at from ${sql.identifier(migrationsTable)} order by created_at desc limit 1`, + ); + + const lastDbMigration = dbMigrations[0]; + + await session.transaction(async (tx) => { + for (const migration of migrations) { + if ( + !lastDbMigration + || Number(lastDbMigration.created_at) < migration.folderMillis + ) { + for (const stmt of migration.sql) { + await tx.execute(sql.raw(stmt)); + } + await tx.execute( + sql`insert into ${ + sql.identifier(migrationsTable) + } (\`hash\`, \`created_at\`) values(${migration.hash}, ${migration.folderMillis})`, + ); + } + } + }); + } + + escapeName(name: string): string { + return `\`${name}\``; + } + + escapeParam(_num: number): string { + return `?`; + } + + escapeString(str: string): string { + return `'${str.replace(/'/g, "''")}'`; + } + + private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { + if (!queries?.length) return undefined; + + const withSqlChunks = [sql`with `]; + for (const [i, w] of queries.entries()) { + withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); + if (i < queries.length - 1) { + withSqlChunks.push(sql`, `); + } + } + withSqlChunks.push(sql` `); + return sql.join(withSqlChunks); + } + + buildDeleteQuery({ table, where, returning, withList, limit, orderBy }: SingleStoreDeleteConfig): SQL { + const withSql = this.buildWithCTE(withList); + + const returningSql = returning + ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` + : undefined; + + const whereSql = where ? 
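/*
 * Illustrative sketch, not part of the original diff: given the fragments assembled in
 * buildDeleteQuery, a call such as
 *   db.delete(cars).where(eq(cars.color, 'green')).orderBy(cars.id).limit(5)
 * would be expected to render roughly as
 *   delete from `cars` where `cars`.`color` = ? order by `cars`.`id` limit ?
 * with identifier quoting and placeholders coming from escapeName/escapeParam above. Exact output
 * depends on the table definition; `cars` is only an assumed example table.
 */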
sql` where ${where}` : undefined; + + const orderBySql = this.buildOrderBy(orderBy); + + const limitSql = this.buildLimit(limit); + + return sql`${withSql}delete from ${table}${whereSql}${orderBySql}${limitSql}${returningSql}`; + } + + buildUpdateSet(table: SingleStoreTable, set: UpdateSet): SQL { + const tableColumns = table[Table.Symbol.Columns]; + + const columnNames = Object.keys(tableColumns).filter((colName) => + set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined + ); + + const setSize = columnNames.length; + return sql.join(columnNames.flatMap((colName, i) => { + const col = tableColumns[colName]!; + + const value = set[colName] ?? sql.param(col.onUpdateFn!(), col); + const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; + + if (i < setSize - 1) { + return [res, sql.raw(', ')]; + } + return [res]; + })); + } + + buildUpdateQuery({ table, set, where, returning, withList, limit, orderBy }: SingleStoreUpdateConfig): SQL { + const withSql = this.buildWithCTE(withList); + + const setSql = this.buildUpdateSet(table, set); + + const returningSql = returning + ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` + : undefined; + + const whereSql = where ? sql` where ${where}` : undefined; + + const orderBySql = this.buildOrderBy(orderBy); + + const limitSql = this.buildLimit(limit); + + return sql`${withSql}update ${table} set ${setSql}${whereSql}${orderBySql}${limitSql}${returningSql}`; + } + + /** + * Builds selection SQL with provided fields/expressions + * + * Examples: + * + * `select from` + * + * `insert ... returning ` + * + * If `isSingleTable` is true, then columns won't be prefixed with table name + */ + private buildSelection( + fields: SelectedFieldsOrdered, + { isSingleTable = false }: { isSingleTable?: boolean } = {}, + ): SQL { + const columnsLen = fields.length; + + const chunks = fields + .flatMap(({ field }, i) => { + const chunk: SQLChunk[] = []; + + if (is(field, SQL.Aliased) && field.isSelectionField) { + chunk.push(sql.identifier(field.fieldAlias)); + } else if (is(field, SQL.Aliased) || is(field, SQL)) { + const query = is(field, SQL.Aliased) ? field.sql : field; + + if (isSingleTable) { + chunk.push( + new SQL( + query.queryChunks.map((c) => { + if (is(c, SingleStoreColumn)) { + return sql.identifier(this.casing.getColumnCasing(c)); + } + return c; + }), + ), + ); + } else { + chunk.push(query); + } + + if (is(field, SQL.Aliased)) { + chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); + } + } else if (is(field, Column)) { + if (isSingleTable) { + chunk.push(sql.identifier(this.casing.getColumnCasing(field))); + } else { + chunk.push(field); + } + } + + if (i < columnsLen - 1) { + chunk.push(sql`, `); + } + + return chunk; + }); + + return sql.join(chunks); + } + + private buildLimit(limit: number | Placeholder | undefined): SQL | undefined { + return typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? sql` limit ${limit}` + : undefined; + } + + private buildOrderBy(orderBy: (SingleStoreColumn | SQL | SQL.Aliased)[] | undefined): SQL | undefined { + return orderBy && orderBy.length > 0 ? sql` order by ${sql.join(orderBy, sql`, `)}` : undefined; + } + + buildSelectQuery( + { + withList, + fields, + fieldsFlat, + where, + having, + table, + joins, + orderBy, + groupBy, + limit, + offset, + lockingClause, + distinct, + setOperators, + }: SingleStoreSelectConfig, + ): SQL { + const fieldsList = fieldsFlat ?? 
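/*
 * Illustrative note, not part of the original diff: the validation loop below checks that every
 * selected column belongs to the target table, to one of the joined tables (matched by alias), or to
 * a subquery/SQL source. A query like
 *   db.select({ owner: users.name }).from(cars)   // no join on `users`
 * is the case it guards against, throwing the "Did you forget to join it?" error
 * (`cars` and `users` are assumed example tables).
 */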
orderSelectedFields(fields); + for (const f of fieldsList) { + if ( + is(f.field, Column) + && getTableName(f.field.table) + !== (is(table, Subquery) + ? table._.alias + /* : is(table, SingleStoreViewBase) + ? table[ViewBaseConfig].name */ + : is(table, SQL) + ? undefined + : getTableName(table)) + && !((table) => + joins?.some(({ alias }) => + alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) + ))(f.field.table) + ) { + const tableName = getTableName(f.field.table); + throw new Error( + `Your "${ + f.path.join('->') + }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, + ); + } + } + + const isSingleTable = !joins || joins.length === 0; + + const withSql = this.buildWithCTE(withList); + + const distinctSql = distinct ? sql` distinct` : undefined; + + const selection = this.buildSelection(fieldsList, { isSingleTable }); + + const tableSql = (() => { + if (is(table, Table) && table[Table.Symbol.OriginalName] !== table[Table.Symbol.Name]) { + return sql`${sql.identifier(table[Table.Symbol.OriginalName])} ${sql.identifier(table[Table.Symbol.Name])}`; + } + + return table; + })(); + + const joinsArray: SQL[] = []; + + if (joins) { + for (const [index, joinMeta] of joins.entries()) { + if (index === 0) { + joinsArray.push(sql` `); + } + const table = joinMeta.table; + const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; + + if (is(table, SingleStoreTable)) { + const tableName = table[SingleStoreTable.Symbol.Name]; + const tableSchema = table[SingleStoreTable.Symbol.Schema]; + const origTableName = table[SingleStoreTable.Symbol.OriginalName]; + const alias = tableName === origTableName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined + }${sql.identifier(origTableName)}${alias && sql` ${sql.identifier(alias)}`} on ${joinMeta.on}`, + ); + } else if (is(table, View)) { + const viewName = table[ViewBaseConfig].name; + const viewSchema = table[ViewBaseConfig].schema; + const origViewName = table[ViewBaseConfig].originalName; + const alias = viewName === origViewName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + viewSchema ? sql`${sql.identifier(viewSchema)}.` : undefined + }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`} on ${joinMeta.on}`, + ); + } else { + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table} on ${joinMeta.on}`, + ); + } + if (index < joins.length - 1) { + joinsArray.push(sql` `); + } + } + } + + const joinsSql = sql.join(joinsArray); + + const whereSql = where ? sql` where ${where}` : undefined; + + const havingSql = having ? sql` having ${having}` : undefined; + + const orderBySql = this.buildOrderBy(orderBy); + + const groupBySql = groupBy && groupBy.length > 0 ? sql` group by ${sql.join(groupBy, sql`, `)}` : undefined; + + const limitSql = this.buildLimit(limit); + + const offsetSql = offset ? 
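/*
 * Illustrative note, not part of the original diff: the remaining pieces (offset and the optional
 * locking clause) are appended below, and the final template follows the usual MySQL-compatible
 * clause order:
 *   select ... from ... <joins> where ... group by ... having ... order by ... limit ... offset ...
 * If set operators were chained (e.g. union), buildSetOperations wraps this query afterwards.
 */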
sql` offset ${offset}` : undefined; + + let lockingClausesSql; + if (lockingClause) { + const { config, strength } = lockingClause; + lockingClausesSql = sql` for ${sql.raw(strength)}`; + if (config.noWait) { + lockingClausesSql.append(sql` no wait`); + } else if (config.skipLocked) { + lockingClausesSql.append(sql` skip locked`); + } + } + + const finalQuery = + sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}${lockingClausesSql}`; + + if (setOperators.length > 0) { + return this.buildSetOperations(finalQuery, setOperators); + } + + return finalQuery; + } + + buildSetOperations(leftSelect: SQL, setOperators: SingleStoreSelectConfig['setOperators']): SQL { + const [setOperator, ...rest] = setOperators; + + if (!setOperator) { + throw new Error('Cannot pass undefined values to any set operator'); + } + + if (rest.length === 0) { + return this.buildSetOperationQuery({ leftSelect, setOperator }); + } + + // Some recursive magic here + return this.buildSetOperations( + this.buildSetOperationQuery({ leftSelect, setOperator }), + rest, + ); + } + + buildSetOperationQuery({ + leftSelect, + setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, + }: { leftSelect: SQL; setOperator: SingleStoreSelectConfig['setOperators'][number] }): SQL { + const leftChunk = sql`(${leftSelect.getSQL()}) `; + const rightChunk = sql`(${rightSelect.getSQL()})`; + + let orderBySql; + if (orderBy && orderBy.length > 0) { + const orderByValues: (SQL | Name)[] = []; + + // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` + // which is invalid SingleStore syntax, Table from one of the SELECTs cannot be used in global ORDER clause + for (const orderByUnit of orderBy) { + if (is(orderByUnit, SingleStoreColumn)) { + orderByValues.push(sql.identifier(this.casing.getColumnCasing(orderByUnit))); + } else if (is(orderByUnit, SQL)) { + for (let i = 0; i < orderByUnit.queryChunks.length; i++) { + const chunk = orderByUnit.queryChunks[i]; + + if (is(chunk, SingleStoreColumn)) { + orderByUnit.queryChunks[i] = sql.identifier(this.casing.getColumnCasing(chunk)); + } + } + + orderByValues.push(sql`${orderByUnit}`); + } else { + orderByValues.push(sql`${orderByUnit}`); + } + } + + orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; + } + + const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? sql` limit ${limit}` + : undefined; + + const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); + + const offsetSql = offset ? 
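/*
 * Illustrative note, not part of the original diff: both sides of a set operation are wrapped in
 * parentheses, so a union of two selects is expected to render roughly as
 *   (select ...) union (select ...) order by `col` limit ? offset ?
 * (`union all` when isAll is set). The ORDER BY columns were re-emitted above as bare identifiers
 * because a qualified `table`.`column` is not valid in the global ORDER BY of a set operation.
 */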
sql` offset ${offset}` : undefined; + + return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; + } + + buildInsertQuery( + { table, values, ignore, onConflict }: SingleStoreInsertConfig, + ): { sql: SQL; generatedIds: Record[] } { + // const isSingleValue = values.length === 1; + const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; + const columns: Record = table[Table.Symbol.Columns]; + const colEntries: [string, SingleStoreColumn][] = Object.entries(columns).filter(([_, col]) => + !col.shouldDisableInsert() + ); + + const insertOrder = colEntries.map(([, column]) => sql.identifier(this.casing.getColumnCasing(column))); + const generatedIdsResponse: Record[] = []; + + for (const [valueIndex, value] of values.entries()) { + const generatedIds: Record = {}; + + const valueList: (SQLChunk | SQL)[] = []; + for (const [fieldName, col] of colEntries) { + const colValue = value[fieldName]; + if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { + // eslint-disable-next-line unicorn/no-negated-condition + if (col.defaultFn !== undefined) { + const defaultFnResult = col.defaultFn(); + generatedIds[fieldName] = defaultFnResult; + const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); + valueList.push(defaultValue); + // eslint-disable-next-line unicorn/no-negated-condition + } else if (!col.default && col.onUpdateFn !== undefined) { + const onUpdateFnResult = col.onUpdateFn(); + const newValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); + valueList.push(newValue); + } else { + valueList.push(sql`default`); + } + } else { + if (col.defaultFn && is(colValue, Param)) { + generatedIds[fieldName] = colValue.value; + } + valueList.push(colValue); + } + } + + generatedIdsResponse.push(generatedIds); + valuesSqlList.push(valueList); + if (valueIndex < values.length - 1) { + valuesSqlList.push(sql`, `); + } + } + + const valuesSql = sql.join(valuesSqlList); + + const ignoreSql = ignore ? sql` ignore` : undefined; + + const onConflictSql = onConflict ? 
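/*
 * Illustrative sketch, not part of the original diff: with the `on duplicate key ...` tail built just
 * below, an insert such as
 *   db.insert(cars).values({ id: 1, brand: 'BMW' }).onDuplicateKeyUpdate({ set: { brand: 'Porsche' } })
 * would be expected to produce roughly
 *   insert into `cars` (`id`, `brand`) values (?, ?) on duplicate key update `brand` = ?
 * while generatedIds collects per-row values produced by column defaultFn (e.g. generated keys).
 * `cars` is only an assumed example table.
 */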
sql` on duplicate key ${onConflict}` : undefined; + + return { + sql: sql`insert${ignoreSql} into ${table} ${insertOrder} values ${valuesSql}${onConflictSql}`, + generatedIds: generatedIdsResponse, + }; + } + + sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { + return sql.toQuery({ + casing: this.casing, + escapeName: this.escapeName, + escapeParam: this.escapeParam, + escapeString: this.escapeString, + invokeSource, + }); + } + + buildRelationalQuery({ + fullSchema, + schema, + tableNamesMap, + table, + tableConfig, + queryConfig: config, + tableAlias, + nestedQueryRelation, + joinOn, + }: { + fullSchema: Record; + schema: TablesRelationalConfig; + tableNamesMap: Record; + table: SingleStoreTable; + tableConfig: TableRelationalConfig; + queryConfig: true | DBQueryConfig<'many', true>; + tableAlias: string; + nestedQueryRelation?: Relation; + joinOn?: SQL; + }): BuildRelationalQueryResult { + let selection: BuildRelationalQueryResult['selection'] = []; + let limit, offset, orderBy: SingleStoreSelectConfig['orderBy'], where; + const joins: SingleStoreSelectJoinConfig[] = []; + + if (config === true) { + const selectionEntries = Object.entries(tableConfig.columns); + selection = selectionEntries.map(( + [key, value], + ) => ({ + dbKey: value.name, + tsKey: key, + field: aliasedTableColumn(value as SingleStoreColumn, tableAlias), + relationTableTsKey: undefined, + isJson: false, + selection: [], + })); + } else { + const aliasedColumns = Object.fromEntries( + Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), + ); + + if (config.where) { + const whereSql = typeof config.where === 'function' + ? config.where(aliasedColumns, getOperators()) + : config.where; + where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); + } + + const fieldsSelection: { tsKey: string; value: SingleStoreColumn | SQL.Aliased }[] = []; + let selectedColumns: string[] = []; + + // Figure out which columns to select + if (config.columns) { + let isIncludeMode = false; + + for (const [field, value] of Object.entries(config.columns)) { + if (value === undefined) { + continue; + } + + if (field in tableConfig.columns) { + if (!isIncludeMode && value === true) { + isIncludeMode = true; + } + selectedColumns.push(field); + } + } + + if (selectedColumns.length > 0) { + selectedColumns = isIncludeMode + ? selectedColumns.filter((c) => config.columns?.[c] === true) + : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); + } + } else { + // Select all columns if selection is not specified + selectedColumns = Object.keys(tableConfig.columns); + } + + for (const field of selectedColumns) { + const column = tableConfig.columns[field]! as SingleStoreColumn; + fieldsSelection.push({ tsKey: field, value: column }); + } + + let selectedRelations: { + tsKey: string; + queryConfig: true | DBQueryConfig<'many', false>; + relation: Relation; + }[] = []; + + // Figure out which relations to select + if (config.with) { + selectedRelations = Object.entries(config.with) + .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) + .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); + } + + let extras; + + // Figure out which extras to select + if (config.extras) { + extras = typeof config.extras === 'function' + ? 
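/*
 * Illustrative note, not part of the original diff: `extras` lets a relational query attach computed
 * SQL fields, e.g. (assuming a `cars` table registered in the relational schema)
 *   db.query.cars.findMany({ extras: { lowerBrand: sql`lower(brand)`.as('lowerBrand') } })
 * Each extra is re-aliased to the current table alias below and appended to the selection.
 */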
config.extras(aliasedColumns, { sql }) + : config.extras; + for (const [tsKey, value] of Object.entries(extras)) { + fieldsSelection.push({ + tsKey, + value: mapColumnsInAliasedSQLToAlias(value, tableAlias), + }); + } + } + + // Transform `fieldsSelection` into `selection` + // `fieldsSelection` shouldn't be used after this point + for (const { tsKey, value } of fieldsSelection) { + selection.push({ + dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, + tsKey, + field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, + relationTableTsKey: undefined, + isJson: false, + selection: [], + }); + } + + let orderByOrig = typeof config.orderBy === 'function' + ? config.orderBy(aliasedColumns, getOrderByOperators()) + : config.orderBy ?? []; + if (!Array.isArray(orderByOrig)) { + orderByOrig = [orderByOrig]; + } + orderBy = orderByOrig.map((orderByValue) => { + if (is(orderByValue, Column)) { + return aliasedTableColumn(orderByValue, tableAlias) as SingleStoreColumn; + } + return mapColumnsInSQLToAlias(orderByValue, tableAlias); + }); + + limit = config.limit; + offset = config.offset; + + // Process all relations + for ( + const { + tsKey: selectedRelationTsKey, + queryConfig: selectedRelationConfigValue, + relation, + } of selectedRelations + ) { + const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); + const relationTableName = getTableUniqueName(relation.referencedTable); + const relationTableTsName = tableNamesMap[relationTableName]!; + const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; + const joinOn = and( + ...normalizedRelation.fields.map((field, i) => + eq( + aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), + aliasedTableColumn(field, tableAlias), + ) + ), + ); + const builtRelation = this.buildRelationalQuery({ + fullSchema, + schema, + tableNamesMap, + table: fullSchema[relationTableTsName] as SingleStoreTable, + tableConfig: schema[relationTableTsName]!, + queryConfig: is(relation, One) + ? (selectedRelationConfigValue === true + ? { limit: 1 } + : { ...selectedRelationConfigValue, limit: 1 }) + : selectedRelationConfigValue, + tableAlias: relationTableAlias, + joinOn, + nestedQueryRelation: relation, + }); + const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); + joins.push({ + on: sql`true`, + table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), + alias: relationTableAlias, + joinType: 'left', + lateral: true, + }); + selection.push({ + dbKey: selectedRelationTsKey, + tsKey: selectedRelationTsKey, + field, + relationTableTsKey: relationTableTsName, + isJson: true, + selection: builtRelation.selection, + }); + } + } + + if (selection.length === 0) { + throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")` }); + } + + let result; + + where = and(joinOn, where); + + if (nestedQueryRelation) { + let field = sql`JSON_TO_ARRAY(${ + sql.join( + selection.map(({ field, tsKey, isJson }) => + isJson + ? sql`${sql.identifier(`${tableAlias}_${tsKey}`)}.${sql.identifier('data')}` + : is(field, SQL.Aliased) + ? 
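/*
 * Illustrative note, not part of the original diff: when this query is nested under a relation, the
 * selected fields are packed into a single `data` column via the JSON_TO_ARRAY(...) opened just above
 * and, for `many` relations, aggregated with json_agg(...) just below; the subquery is then joined
 * back laterally so each parent row carries its related rows as JSON, which mapRelationalRow later
 * unpacks.
 */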
field.sql + : field + ), + sql`, `, + ) + })`; + if (is(nestedQueryRelation, Many)) { + field = sql`json_agg(${field})`; + } + const nestedSelection = [{ + dbKey: 'data', + tsKey: 'data', + field: field.as('data'), + isJson: true, + relationTableTsKey: tableConfig.tsName, + selection, + }]; + + const needsSubquery = limit !== undefined || offset !== undefined || (orderBy?.length ?? 0) > 0; + + if (needsSubquery) { + result = this.buildSelectQuery({ + table: aliasedTable(table, tableAlias), + fields: {}, + fieldsFlat: [ + { + path: [], + field: sql.raw('*'), + }, + ...(((orderBy?.length ?? 0) > 0) + ? [{ + path: [], + field: sql`row_number() over (order by ${sql.join(orderBy!, sql`, `)})`, + }] + : []), + ], + where, + limit, + offset, + setOperators: [], + }); + + where = undefined; + limit = undefined; + offset = undefined; + orderBy = undefined; + } else { + result = aliasedTable(table, tableAlias); + } + + result = this.buildSelectQuery({ + table: is(result, SingleStoreTable) ? result : new Subquery(result, {}, tableAlias), + fields: {}, + fieldsFlat: nestedSelection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + joins, + where, + limit, + offset, + orderBy, + setOperators: [], + }); + } else { + result = this.buildSelectQuery({ + table: aliasedTable(table, tableAlias), + fields: {}, + fieldsFlat: selection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + joins, + where, + limit, + offset, + orderBy, + setOperators: [], + }); + } + + return { + tableTsKey: tableConfig.tsName, + sql: result, + selection, + }; + } +} diff --git a/drizzle-orm/src/singlestore-core/expressions.ts b/drizzle-orm/src/singlestore-core/expressions.ts new file mode 100644 index 000000000..6d4284d18 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/expressions.ts @@ -0,0 +1,25 @@ +import { bindIfParam } from '~/expressions.ts'; +import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; +import { sql } from '~/sql/sql.ts'; +import type { SingleStoreColumn } from './columns/index.ts'; + +export * from '~/expressions.ts'; + +export function concat(column: SingleStoreColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { + return sql`${column} || ${bindIfParam(value, column)}`; +} + +export function substring( + column: SingleStoreColumn | SQL.Aliased, + { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, +): SQL { + const chunks: SQLChunk[] = [sql`substring(`, column]; + if (from !== undefined) { + chunks.push(sql` from `, bindIfParam(from, column)); + } + if (_for !== undefined) { + chunks.push(sql` for `, bindIfParam(_for, column)); + } + chunks.push(sql`)`); + return sql.join(chunks); +} diff --git a/drizzle-orm/src/singlestore-core/index.ts b/drizzle-orm/src/singlestore-core/index.ts new file mode 100644 index 000000000..4e7d4268e --- /dev/null +++ b/drizzle-orm/src/singlestore-core/index.ts @@ -0,0 +1,15 @@ +export * from './alias.ts'; +export * from './columns/index.ts'; +export * from './db.ts'; +export * from './dialect.ts'; +export * from './indexes.ts'; +export * from './primary-keys.ts'; +export * from './query-builders/index.ts'; +export * from './schema.ts'; +export * from './session.ts'; +export * from './subquery.ts'; +export * from './table.ts'; +export * from './unique-constraint.ts'; +export * from './utils.ts'; +/* export * from './view-common.ts'; +export * from 
'./view.ts'; */ diff --git a/drizzle-orm/src/singlestore-core/indexes.ts b/drizzle-orm/src/singlestore-core/indexes.ts new file mode 100644 index 000000000..3120cab1b --- /dev/null +++ b/drizzle-orm/src/singlestore-core/indexes.ts @@ -0,0 +1,193 @@ +import { entityKind } from '~/entity.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { AnySingleStoreColumn, SingleStoreColumn } from './columns/index.ts'; +import type { SingleStoreTable } from './table.ts'; + +interface IndexConfig { + name: string; + + columns: IndexColumn[]; + + /** + * If true, the index will be created as `create unique index` instead of `create index`. + */ + unique?: boolean; + + /** + * If set, the index will be created as `create index ... using { 'btree' | 'hash' }`. + */ + using?: 'btree' | 'hash'; + + /** + * If set, the index will be created as `create index ... algorythm { 'default' | 'inplace' | 'copy' }`. + */ + algorythm?: 'default' | 'inplace' | 'copy'; + + /** + * If set, adds locks to the index creation. + */ + lock?: 'default' | 'none' | 'shared' | 'exclusive'; +} + +export type IndexColumn = SingleStoreColumn | SQL; + +export class IndexBuilderOn { + static readonly [entityKind]: string = 'SingleStoreIndexBuilderOn'; + + constructor(private name: string, private unique: boolean) {} + + on(...columns: [IndexColumn, ...IndexColumn[]]): IndexBuilder { + return new IndexBuilder(this.name, columns, this.unique); + } +} + +export interface AnyIndexBuilder { + build(table: SingleStoreTable): Index; +} + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IndexBuilder extends AnyIndexBuilder {} + +export class IndexBuilder implements AnyIndexBuilder { + static readonly [entityKind]: string = 'SingleStoreIndexBuilder'; + + /** @internal */ + config: IndexConfig; + + constructor(name: string, columns: IndexColumn[], unique: boolean) { + this.config = { + name, + columns, + unique, + }; + } + + using(using: IndexConfig['using']): this { + this.config.using = using; + return this; + } + + algorythm(algorythm: IndexConfig['algorythm']): this { + this.config.algorythm = algorythm; + return this; + } + + lock(lock: IndexConfig['lock']): this { + this.config.lock = lock; + return this; + } + + /** @internal */ + build(table: SingleStoreTable): Index { + return new Index(this.config, table); + } +} + +export class Index { + static readonly [entityKind]: string = 'SingleStoreIndex'; + + readonly config: IndexConfig & { table: SingleStoreTable }; + + constructor(config: IndexConfig, table: SingleStoreTable) { + this.config = { ...config, table }; + } +} + +export type GetColumnsTableName = TColumns extends + AnySingleStoreColumn<{ tableName: infer TTableName extends string }> | AnySingleStoreColumn< + { tableName: infer TTableName extends string } + >[] ? 
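/*
 * Illustrative note, not part of the original diff: the index() / uniqueIndex() helpers defined next
 * are meant to be used from a table definition, e.g.
 *   index('brand_idx').on(cars.brand)
 *   uniqueIndex('vin_idx').on(cars.vin).using('hash')
 * The builder only records configuration (`using`, `algorythm` as spelled in this file, `lock`);
 * emitting the actual DDL is presumably left to drizzle-kit's serializers. `cars` is an assumed
 * example table.
 */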
TTableName + : never; + +export function index(name: string): IndexBuilderOn { + return new IndexBuilderOn(name, false); +} + +export function uniqueIndex(name: string): IndexBuilderOn { + return new IndexBuilderOn(name, true); +} + +/* export interface AnyFullTextIndexBuilder { + build(table: SingleStoreTable): FullTextIndex; +} */ +/* +interface FullTextIndexConfig { + version?: number; +} + +interface FullTextIndexFullConfig extends FullTextIndexConfig { + columns: IndexColumn[]; + + name: string; +} + +export class FullTextIndexBuilderOn { + static readonly [entityKind]: string = 'SingleStoreFullTextIndexBuilderOn'; + + constructor(private name: string, private config: FullTextIndexConfig) {} + + on(...columns: [IndexColumn, ...IndexColumn[]]): FullTextIndexBuilder { + return new FullTextIndexBuilder({ + name: this.name, + columns: columns, + ...this.config, + }); + } +} */ + +/* +export interface FullTextIndexBuilder extends AnyFullTextIndexBuilder {} + +export class FullTextIndexBuilder implements AnyFullTextIndexBuilder { + static readonly [entityKind]: string = 'SingleStoreFullTextIndexBuilder'; */ + +/** @internal */ +/* config: FullTextIndexFullConfig; + + constructor(config: FullTextIndexFullConfig) { + this.config = config; + } */ + +/** @internal */ +/* build(table: SingleStoreTable): FullTextIndex { + return new FullTextIndex(this.config, table); + } +} + +export class FullTextIndex { + static readonly [entityKind]: string = 'SingleStoreFullTextIndex'; + + readonly config: FullTextIndexConfig & { table: SingleStoreTable }; + + constructor(config: FullTextIndexConfig, table: SingleStoreTable) { + this.config = { ...config, table }; + } +} + +export function fulltext(name: string, config: FullTextIndexConfig): FullTextIndexBuilderOn { + return new FullTextIndexBuilderOn(name, config); +} + +export type SortKeyColumn = SingleStoreColumn | SQL; + +export class SortKeyBuilder { + static readonly [entityKind]: string = 'SingleStoreSortKeyBuilder'; + + constructor(private columns: SortKeyColumn[]) {} */ + +/** @internal */ +/* build(table: SingleStoreTable): SortKey { + return new SortKey(this.columns, table); + } +} + +export class SortKey { + static readonly [entityKind]: string = 'SingleStoreSortKey'; + + constructor(public columns: SortKeyColumn[], public table: SingleStoreTable) {} +} + +export function sortKey(...columns: SortKeyColumn[]): SortKeyBuilder { + return new SortKeyBuilder(columns); +} + */ diff --git a/drizzle-orm/src/singlestore-core/primary-keys.ts b/drizzle-orm/src/singlestore-core/primary-keys.ts new file mode 100644 index 000000000..47dc0a19c --- /dev/null +++ b/drizzle-orm/src/singlestore-core/primary-keys.ts @@ -0,0 +1,63 @@ +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreColumn, SingleStoreColumn } from './columns/index.ts'; +import { SingleStoreTable } from './table.ts'; + +export function primaryKey< + TTableName extends string, + TColumn extends AnySingleStoreColumn<{ tableName: TTableName }>, + TColumns extends AnySingleStoreColumn<{ tableName: TTableName }>[], +>(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; +/** + * @deprecated: Please use primaryKey({ columns: [] }) instead of this function + * @param columns + */ +export function primaryKey< + TTableName extends string, + TColumns extends AnySingleStoreColumn<{ tableName: TTableName }>[], +>(...columns: TColumns): PrimaryKeyBuilder; +export function primaryKey(...config: any) { + if (config[0].columns) { + return new 
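/*
 * Illustrative note, not part of the original diff: both overloads converge on the same builder, e.g.
 *   primaryKey({ name: 'cars_pk', columns: [cars.id, cars.vin] })
 * or the deprecated positional form primaryKey(cars.id, cars.vin). When no name is given, getName()
 * below falls back to `<table>_<col1>_..._pk`. `cars` is an assumed example table.
 */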
PrimaryKeyBuilder(config[0].columns, config[0].name); + } + return new PrimaryKeyBuilder(config); +} + +export class PrimaryKeyBuilder { + static readonly [entityKind]: string = 'SingleStorePrimaryKeyBuilder'; + + /** @internal */ + columns: SingleStoreColumn[]; + + /** @internal */ + name?: string; + + constructor( + columns: SingleStoreColumn[], + name?: string, + ) { + this.columns = columns; + this.name = name; + } + + /** @internal */ + build(table: SingleStoreTable): PrimaryKey { + return new PrimaryKey(table, this.columns, this.name); + } +} + +export class PrimaryKey { + static readonly [entityKind]: string = 'SingleStorePrimaryKey'; + + readonly columns: SingleStoreColumn[]; + readonly name?: string; + + constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { + this.columns = columns; + this.name = name; + } + + getName(): string { + return this.name + ?? `${this.table[SingleStoreTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/count.ts b/drizzle-orm/src/singlestore-core/query-builders/count.ts new file mode 100644 index 000000000..aba5b2f3f --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/count.ts @@ -0,0 +1,79 @@ +import { entityKind } from '~/entity.ts'; +import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; +import type { SingleStoreSession } from '../session.ts'; +import type { SingleStoreTable } from '../table.ts'; +/* import type { SingleStoreViewBase } from '../view-base.ts'; */ + +export class SingleStoreCountBuilder< + TSession extends SingleStoreSession, +> extends SQL implements Promise, SQLWrapper { + private sql: SQL; + + static override readonly [entityKind] = 'SingleStoreCountBuilder'; + [Symbol.toStringTag] = 'SingleStoreCountBuilder'; + + private session: TSession; + + private static buildEmbeddedCount( + source: SingleStoreTable | /* SingleStoreViewBase | */ SQL | SQLWrapper, + filters?: SQL, + ): SQL { + return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; + } + + private static buildCount( + source: SingleStoreTable | /* SingleStoreViewBase | */ SQL | SQLWrapper, + filters?: SQL, + ): SQL { + return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters}`; + } + + constructor( + readonly params: { + source: SingleStoreTable | /* SingleStoreViewBase | */ SQL | SQLWrapper; + filters?: SQL; + session: TSession; + }, + ) { + super(SingleStoreCountBuilder.buildEmbeddedCount(params.source, params.filters).queryChunks); + + this.mapWith(Number); + + this.session = params.session; + + this.sql = SingleStoreCountBuilder.buildCount( + params.source, + params.filters, + ); + } + + then( + onfulfilled?: ((value: number) => TResult1 | PromiseLike) | null | undefined, + onrejected?: ((reason: any) => TResult2 | PromiseLike) | null | undefined, + ): Promise { + return Promise.resolve(this.session.count(this.sql)) + .then( + onfulfilled, + onrejected, + ); + } + + catch( + onRejected?: ((reason: any) => never | PromiseLike) | null | undefined, + ): Promise { + return this.then(undefined, onRejected); + } + + finally(onFinally?: (() => void) | null | undefined): Promise { + return this.then( + (value) => { + onFinally?.(); + return value; + }, + (reason) => { + onFinally?.(); + throw reason; + }, + ); + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/delete.ts b/drizzle-orm/src/singlestore-core/query-builders/delete.ts new file mode 
100644 index 000000000..1f41d29ba --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/delete.ts @@ -0,0 +1,207 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import { Table } from '~/table.ts'; +import type { ValueOrArray } from '~/utils.ts'; +import type { SingleStoreColumn } from '../columns/common.ts'; +import type { SelectedFieldsOrdered } from './select.types.ts'; + +export type SingleStoreDeleteWithout< + T extends AnySingleStoreDeleteBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T + : Omit< + SingleStoreDeleteBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreDelete< + TTable extends SingleStoreTable = SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreDeleteBase; + +export interface SingleStoreDeleteConfig { + where?: SQL | undefined; + limit?: number | Placeholder; + orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + table: SingleStoreTable; + returning?: SelectedFieldsOrdered; + withList?: Subquery[]; +} + +export type SingleStoreDeletePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreDeleteDynamic = SingleStoreDelete< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreDeleteBase = SingleStoreDeleteBase; + +export interface SingleStoreDeleteBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreDeleteBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static override readonly [entityKind]: string = 'SingleStoreDelete'; + + private config: SingleStoreDeleteConfig; + + constructor( + private table: TTable, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + withList?: Subquery[], + ) { + super(); + this.config = { table, withList }; + } + + /** + * Adds 
a `where` clause to the query. + * + * Calling this method will delete only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be deleted. + * + * ```ts + * // Delete all cars with green color + * db.delete(cars).where(eq(cars.color, 'green')); + * // or + * db.delete(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Delete all BMW cars with a green color + * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Delete all cars with the green or blue color + * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): SingleStoreDeleteWithout { + this.config.where = where; + return this as any; + } + + orderBy( + builder: (deleteTable: TTable) => ValueOrArray, + ): SingleStoreDeleteWithout; + orderBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreDeleteWithout; + orderBy( + ...columns: + | [(deleteTable: TTable) => ValueOrArray] + | (SingleStoreColumn | SQL | SQL.Aliased)[] + ): SingleStoreDeleteWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.table[Table.Symbol.Columns], + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as any, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + this.config.orderBy = orderByArray; + } else { + const orderByArray = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; + this.config.orderBy = orderByArray; + } + return this as any; + } + + limit(limit: number | Placeholder): SingleStoreDeleteWithout { + this.config.limit = limit; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildDeleteQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreDeletePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + this.config.returning, + ) as SingleStoreDeletePrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreDeleteDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/index.ts b/drizzle-orm/src/singlestore-core/query-builders/index.ts new file mode 100644 index 000000000..704cb4afa --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/index.ts @@ -0,0 +1,11 @@ +/* export * from './attach.ts'; +export * from './branch.ts'; +export * from './createMilestone.ts'; */ +export * from './delete.ts'; +/* export * from './detach.ts'; */ +export * from './insert.ts'; +/* export * from './optimizeTable.ts'; */ +export * from './query-builder.ts'; +export * from './select.ts'; +export * from './select.types.ts'; +export * from './update.ts'; diff --git a/drizzle-orm/src/singlestore-core/query-builders/insert.ts 
b/drizzle-orm/src/singlestore-core/query-builders/insert.ts new file mode 100644 index 000000000..84a72fdab --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/insert.ts @@ -0,0 +1,305 @@ +import { entityKind, is } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; +import { Param, SQL, sql } from '~/sql/sql.ts'; +import type { InferModelFromColumns } from '~/table.ts'; +import { Table } from '~/table.ts'; +import { mapUpdateSet, orderSelectedFields } from '~/utils.ts'; +import type { AnySingleStoreColumn, SingleStoreColumn } from '../columns/common.ts'; +import type { SelectedFieldsOrdered } from './select.types.ts'; +import type { SingleStoreUpdateSetSource } from './update.ts'; + +export interface SingleStoreInsertConfig { + table: TTable; + values: Record[]; + ignore: boolean; + onConflict?: SQL; + returning?: SelectedFieldsOrdered; +} + +export type AnySingleStoreInsertConfig = SingleStoreInsertConfig; + +export type SingleStoreInsertValue = + & { + [Key in keyof TTable['$inferInsert']]: TTable['$inferInsert'][Key] | SQL | Placeholder; + } + & {}; + +export class SingleStoreInsertBuilder< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, +> { + static readonly [entityKind]: string = 'SingleStoreInsertBuilder'; + + private shouldIgnore = false; + + constructor( + private table: TTable, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) {} + + ignore(): this { + this.shouldIgnore = true; + return this; + } + + values(value: SingleStoreInsertValue): SingleStoreInsertBase; + values(values: SingleStoreInsertValue[]): SingleStoreInsertBase; + values( + values: SingleStoreInsertValue | SingleStoreInsertValue[], + ): SingleStoreInsertBase { + values = Array.isArray(values) ? values : [values]; + if (values.length === 0) { + throw new Error('values() must be called with at least one value'); + } + const mappedValues = values.map((entry) => { + const result: Record = {}; + const cols = this.table[Table.Symbol.Columns]; + for (const colKey of Object.keys(entry)) { + const colValue = entry[colKey as keyof typeof entry]; + result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); + } + return result; + }); + + return new SingleStoreInsertBase(this.table, mappedValues, this.shouldIgnore, this.session, this.dialect); + } +} + +export type SingleStoreInsertWithout< + T extends AnySingleStoreInsert, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? 
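/*
 * Illustrative note, not part of the original diff: the *Without helper types strip a builder method
 * from the return type once it has been called (so, e.g., .where() cannot be chained twice at the
 * type level), unless the builder was switched to dynamic mode with .$dynamic(), in which case the
 * `TDynamic extends true` branch keeps the full interface.
 */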
T + : Omit< + SingleStoreInsertBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['returning'], + TDynamic, + T['_']['excludedMethods'] | '$returning' + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreInsertDynamic = SingleStoreInsert< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['returning'] +>; + +export type SingleStoreInsertPrepare< + T extends AnySingleStoreInsert, + TReturning extends Record | undefined = undefined, +> = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[]; + iterator: never; + }, + true +>; + +export type SingleStoreInsertOnDuplicateKeyUpdateConfig = { + set: SingleStoreUpdateSetSource; +}; + +export type SingleStoreInsert< + TTable extends SingleStoreTable = SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TReturning extends Record | undefined = Record | undefined, +> = SingleStoreInsertBase; + +export type SingleStoreInsertReturning< + T extends AnySingleStoreInsert, + TDynamic extends boolean, +> = SingleStoreInsertBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + InferModelFromColumns>, + TDynamic, + T['_']['excludedMethods'] | '$returning' +>; + +export type AnySingleStoreInsert = SingleStoreInsertBase; + +export interface SingleStoreInsertBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TReturning extends Record | undefined = undefined, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends + QueryPromise : TReturning[]>, + RunnableQuery< + TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[], + 'singlestore' + >, + SQLWrapper +{ + readonly _: { + readonly dialect: 'singlestore'; + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly returning: TReturning; + readonly result: TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[]; + }; +} + +export type PrimaryKeyKeys> = { + [K in keyof T]: T[K]['_']['isPrimaryKey'] extends true ? T[K]['_']['isAutoincrement'] extends true ? K + : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? K : never + : never + : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? K : never + : never; +}[keyof T]; + +export type GetPrimarySerialOrDefaultKeys> = { + [K in PrimaryKeyKeys]: T[K]; +}; + +export class SingleStoreInsertBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TReturning extends Record | undefined = undefined, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TReturning[]> + implements + RunnableQuery< + TReturning extends undefined ? 
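/*
 * Illustrative note, not part of the original diff: this conditional result type means a plain
 * .values(...) insert resolves to the driver's raw result, while chaining $returningId() (declared
 * further down) narrows the awaited value to the auto-generated / primary-key columns, e.g.
 *   const [{ id }] = await db.insert(cars).values({ brand: 'BMW' }).$returningId();
 * `cars` is an assumed example table with an auto-generated `id` primary key.
 */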
SingleStoreQueryResultKind : TReturning[], + 'singlestore' + >, + SQLWrapper +{ + static override readonly [entityKind]: string = 'SingleStoreInsert'; + + declare protected $table: TTable; + + private config: SingleStoreInsertConfig; + + constructor( + table: TTable, + values: SingleStoreInsertConfig['values'], + ignore: boolean, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { table, values, ignore }; + } + + /** + * Adds an `on duplicate key update` clause to the query. + * + * Calling this method will update update the row if any unique index conflicts. MySQL will automatically determine the conflict target based on the primary key and unique indexes. + * + * See docs: {@link https://orm.drizzle.team/docs/insert#on-duplicate-key-update} + * + * @param config The `set` clause + * + * @example + * ```ts + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW'}) + * .onDuplicateKeyUpdate({ set: { brand: 'Porsche' }}); + * ``` + * + * While MySQL does not directly support doing nothing on conflict, you can perform a no-op by setting any column's value to itself and achieve the same effect: + * + * ```ts + * import { sql } from 'drizzle-orm'; + * + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW' }) + * .onDuplicateKeyUpdate({ set: { id: sql`id` } }); + * ``` + */ + onDuplicateKeyUpdate( + config: SingleStoreInsertOnDuplicateKeyUpdateConfig, + ): SingleStoreInsertWithout { + const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); + this.config.onConflict = sql`update ${setSql}`; + return this as any; + } + + $returningId(): SingleStoreInsertWithout< + SingleStoreInsertReturning, + TDynamic, + '$returningId' + > { + const returning: SelectedFieldsOrdered = []; + for (const [key, value] of Object.entries(this.config.table[Table.Symbol.Columns])) { + if (value.primary) { + returning.push({ field: value, path: [key] }); + } + } + this.config.returning = orderSelectedFields(this.config.table[Table.Symbol.Columns]); + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildInsertQuery(this.config).sql; + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreInsertPrepare { + const { sql, generatedIds } = this.dialect.buildInsertQuery(this.config); + return this.session.prepareQuery( + this.dialect.sqlToQuery(sql), + undefined, + undefined, + generatedIds, + this.config.returning, + ) as SingleStoreInsertPrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreInsertDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts b/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts new file mode 100644 index 000000000..29d6c2290 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts @@ -0,0 +1,114 @@ +import { entityKind, is } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { 
SingleStoreDialectConfig } from '~/singlestore-core/dialect.ts'; +import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { WithSubqueryWithSelection } from '~/singlestore-core/subquery.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import { SingleStoreSelectBuilder } from './select.ts'; +import type { SelectedFields } from './select.types.ts'; + +export class QueryBuilder { + static readonly [entityKind]: string = 'SingleStoreQueryBuilder'; + + private dialect: SingleStoreDialect | undefined; + private dialectConfig: SingleStoreDialectConfig | undefined; + + constructor(dialect?: SingleStoreDialect | SingleStoreDialectConfig) { + this.dialect = is(dialect, SingleStoreDialect) ? dialect : undefined; + this.dialectConfig = is(dialect, SingleStoreDialect) ? undefined : dialect; + } + + $with(alias: TAlias) { + const queryBuilder = this; + + return { + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } + + return new Proxy( + new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as WithSubqueryWithSelection; + }, + }; + } + + with(...queries: WithSubquery[]) { + const self = this; + + function select(): SingleStoreSelectBuilder; + function select( + fields: TSelection, + ): SingleStoreSelectBuilder; + function select( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + withList: queries, + }); + } + + function selectDistinct(): SingleStoreSelectBuilder; + function selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + function selectDistinct( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + withList: queries, + distinct: true, + }); + } + + return { select, selectDistinct }; + } + + select(): SingleStoreSelectBuilder; + select(fields: TSelection): SingleStoreSelectBuilder; + select( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: this.getDialect(), + }); + } + + selectDistinct(): SingleStoreSelectBuilder; + selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + selectDistinct( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? 
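/*
 * Illustrative note, not part of the original diff: QueryBuilder is the session-less variant used to
 * compose standalone subqueries, e.g.
 *   const qb = new QueryBuilder();
 *   const newCars = qb.select().from(cars).where(gt(cars.year, 2020)).as('new_cars');
 * It lazily instantiates a SingleStoreDialect (see getDialect below) to avoid a circular dependency.
 * `cars` and the gt() filter are assumed examples.
 */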
undefined, + session: undefined, + dialect: this.getDialect(), + distinct: true, + }); + } + + // Lazy load dialect to avoid circular dependency + private getDialect() { + if (!this.dialect) { + this.dialect = new SingleStoreDialect(this.dialectConfig); + } + + return this.dialect; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/query.ts b/drizzle-orm/src/singlestore-core/query-builders/query.ts new file mode 100644 index 000000000..c15f7ad59 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/query.ts @@ -0,0 +1,141 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import { + type BuildQueryResult, + type BuildRelationalQueryResult, + type DBQueryConfig, + mapRelationalRow, + type TableRelationalConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import type { Query, QueryWithTypings, SQL } from '~/sql/sql.ts'; +import type { KnownKeysOnly } from '~/utils.ts'; +import type { SingleStoreDialect } from '../dialect.ts'; +import type { + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreSession, +} from '../session.ts'; +import type { SingleStoreTable } from '../table.ts'; + +export class RelationalQueryBuilder< + TPreparedQueryHKT extends PreparedQueryHKTBase, + TSchema extends TablesRelationalConfig, + TFields extends TableRelationalConfig, +> { + static readonly [entityKind]: string = 'SingleStoreRelationalQueryBuilder'; + + constructor( + private fullSchema: Record, + private schema: TSchema, + private tableNamesMap: Record, + private table: SingleStoreTable, + private tableConfig: TableRelationalConfig, + private dialect: SingleStoreDialect, + private session: SingleStoreSession, + ) {} + + findMany>( + config?: KnownKeysOnly>, + ): SingleStoreRelationalQuery[]> { + return new SingleStoreRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? (config as DBQueryConfig<'many', true>) : {}, + 'many', + ); + } + + findFirst, 'limit'>>( + config?: KnownKeysOnly, 'limit'>>, + ): SingleStoreRelationalQuery | undefined> { + return new SingleStoreRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? 
{ ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, + 'first', + ); + } +} + +export class SingleStoreRelationalQuery< + TPreparedQueryHKT extends PreparedQueryHKTBase, + TResult, +> extends QueryPromise { + static override readonly [entityKind]: string = 'SingleStoreRelationalQuery'; + + declare protected $brand: 'SingleStoreRelationalQuery'; + + constructor( + private fullSchema: Record, + private schema: TablesRelationalConfig, + private tableNamesMap: Record, + private table: SingleStoreTable, + private tableConfig: TableRelationalConfig, + private dialect: SingleStoreDialect, + private session: SingleStoreSession, + private config: DBQueryConfig<'many', true> | true, + private queryMode: 'many' | 'first', + ) { + super(); + } + + prepare() { + const { query, builtQuery } = this._toSQL(); + return this.session.prepareQuery( + builtQuery, + undefined, + (rawRows) => { + const rows = rawRows.map((row) => mapRelationalRow(this.schema, this.tableConfig, row, query.selection)); + if (this.queryMode === 'first') { + return rows[0] as TResult; + } + return rows as TResult; + }, + ) as PreparedQueryKind; + } + + private _getQuery() { + return this.dialect.buildRelationalQuery({ + fullSchema: this.fullSchema, + schema: this.schema, + tableNamesMap: this.tableNamesMap, + table: this.table, + tableConfig: this.tableConfig, + queryConfig: this.config, + tableAlias: this.tableConfig.tsName, + }); + } + + private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { + const query = this._getQuery(); + + const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); + + return { builtQuery, query }; + } + + /** @internal */ + getSQL(): SQL { + return this._getQuery().sql as SQL; + } + + toSQL(): Query { + return this._toSQL().builtQuery; + } + + override execute(): Promise { + return this.prepare().execute(); + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/select.ts b/drizzle-orm/src/singlestore-core/query-builders/select.ts new file mode 100644 index 000000000..36a3893a2 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/select.ts @@ -0,0 +1,1082 @@ +import { entityKind, is } from '~/entity.ts'; +import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + BuildSubquerySelection, + GetSelectTableName, + GetSelectTableSelection, + JoinNullability, + JoinType, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { SingleStoreColumn } from '~/singlestore-core/columns/index.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + PreparedQueryHKTBase, + SingleStorePreparedQueryConfig, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { SubqueryWithSelection } from '~/singlestore-core/subquery.ts'; +import type { SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { ColumnsSelection, Query } from '~/sql/sql.ts'; +import { SQL } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { Table } from '~/table.ts'; +import { + applyMixins, + getTableColumns, + getTableLikeName, + haveSameKeys, + orderSelectedFields, + type ValueOrArray, +} from '~/utils.ts'; +import type { + AnySingleStoreSelect, + CreateSingleStoreSelectFromBuilderMode, + GetSingleStoreSetOperators, + LockConfig, + LockStrength, + SelectedFields, + 
SetOperatorRightSelect, + SingleStoreCreateSetOperatorFn, + SingleStoreJoinFn, + SingleStoreSelectConfig, + SingleStoreSelectDynamic, + SingleStoreSelectHKT, + SingleStoreSelectHKTBase, + SingleStoreSelectPrepare, + SingleStoreSelectWithout, + SingleStoreSetOperatorExcludedMethods, + SingleStoreSetOperatorWithResult, +} from './select.types.ts'; + +export class SingleStoreSelectBuilder< + TSelection extends SelectedFields | undefined, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBuilderMode extends 'db' | 'qb' = 'db', +> { + static readonly [entityKind]: string = 'SingleStoreSelectBuilder'; + + private fields: TSelection; + private session: SingleStoreSession | undefined; + private dialect: SingleStoreDialect; + private withList: Subquery[] = []; + private distinct: boolean | undefined; + + constructor( + config: { + fields: TSelection; + session: SingleStoreSession | undefined; + dialect: SingleStoreDialect; + withList?: Subquery[]; + distinct?: boolean; + }, + ) { + this.fields = config.fields; + this.session = config.session; + this.dialect = config.dialect; + if (config.withList) { + this.withList = config.withList; + } + this.distinct = config.distinct; + } + + from( // | SingleStoreViewBase + source: TFrom, + ): CreateSingleStoreSelectFromBuilderMode< + TBuilderMode, + GetSelectTableName, + TSelection extends undefined ? GetSelectTableSelection : TSelection, + TSelection extends undefined ? 'single' : 'partial', + TPreparedQueryHKT + > { + const isPartialSelect = !!this.fields; + + let fields: SelectedFields; + if (this.fields) { + fields = this.fields; + } else if (is(source, Subquery)) { + // This is required to use the proxy handler to get the correct field values from the subquery + fields = Object.fromEntries( + Object.keys(source._.selectedFields).map(( + key, + ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), + ); + /* } else if (is(source, SingleStoreViewBase)) { + fields = source[ViewBaseConfig].selectedFields as SelectedFields; */ + } else if (is(source, SQL)) { + fields = {}; + } else { + fields = getTableColumns(source); + } + + return new SingleStoreSelectBase( + { + table: source, + fields, + isPartialSelect, + session: this.session, + dialect: this.dialect, + withList: this.withList, + distinct: this.distinct, + }, + ) as any; + } +} + +export abstract class SingleStoreSelectQueryBuilderBase< + THKT extends SingleStoreSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? 
Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends TypedQueryBuilder { + static override readonly [entityKind]: string = 'SingleStoreSelectQueryBuilder'; + + override readonly _: { + readonly hkt: THKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; + + protected config: SingleStoreSelectConfig; + protected joinsNotNullableMap: Record; + private tableName: string | undefined; + private isPartialSelect: boolean; + /** @internal */ + readonly session: SingleStoreSession | undefined; + protected dialect: SingleStoreDialect; + + constructor( + { table, fields, isPartialSelect, session, dialect, withList, distinct }: { + table: SingleStoreSelectConfig['table']; + fields: SingleStoreSelectConfig['fields']; + isPartialSelect: boolean; + session: SingleStoreSession | undefined; + dialect: SingleStoreDialect; + withList: Subquery[]; + distinct: boolean | undefined; + }, + ) { + super(); + this.config = { + withList, + table, + fields: { ...fields }, + distinct, + setOperators: [], + }; + this.isPartialSelect = isPartialSelect; + this.session = session; + this.dialect = dialect; + this._ = { + selectedFields: fields as TSelectedFields, + } as this['_']; + this.tableName = getTableLikeName(table); + this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; + } + + private createJoin( + joinType: TJoinType, + ): SingleStoreJoinFn { + return ( + table: SingleStoreTable | Subquery | SQL, // | SingleStoreViewBase + on: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, + ) => { + const baseTableName = this.tableName; + const tableName = getTableLikeName(table); + + if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { + throw new Error(`Alias "${tableName}" is already used in this query`); + } + + if (!this.isPartialSelect) { + // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object + if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { + this.config.fields = { + [baseTableName]: this.config.fields, + }; + } + if (typeof tableName === 'string' && !is(table, SQL)) { + const selection = is(table, Subquery) + ? table._.selectedFields + /* : is(table, View) + ? 
table[ViewBaseConfig].selectedFields */ + : table[Table.Symbol.Columns]; + this.config.fields[tableName] = selection; + } + } + + if (typeof on === 'function') { + on = on( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + + if (!this.config.joins) { + this.config.joins = []; + } + + this.config.joins.push({ on, table, joinType, alias: tableName }); + + if (typeof tableName === 'string') { + switch (joinType) { + case 'left': { + this.joinsNotNullableMap[tableName] = false; + break; + } + case 'right': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'inner': { + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'full': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = false; + break; + } + } + } + + return this as any; + }; + } + + /** + * Executes a `left join` operation by adding another table to the current query. + * + * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet | null }[] = await db.select() + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number | null }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + leftJoin = this.createJoin('left'); + + /** + * Executes a `right join` operation by adding another table to the current query. + * + * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet }[] = await db.select() + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + rightJoin = this.createJoin('right'); + + /** + * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. + * + * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} + * + * @param table the table to join. + * @param on the `on` clause. 
+ * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet }[] = await db.select() + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + innerJoin = this.createJoin('inner'); + + /** + * Executes a `full join` operation by combining rows from two tables into a new table. + * + * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet | null }[] = await db.select() + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number | null }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + fullJoin = this.createJoin('full'); + + private createSetOperator( + type: SetOperator, + isAll: boolean, + ): >( + rightSelection: + | ((setOperators: GetSingleStoreSetOperators) => SetOperatorRightSelect) + | SetOperatorRightSelect, + ) => SingleStoreSelectWithout< + this, + TDynamic, + SingleStoreSetOperatorExcludedMethods, + true + > { + return (rightSelection) => { + const rightSelect = (typeof rightSelection === 'function' + ? rightSelection(getSingleStoreSetOperators()) + : rightSelection) as TypedQueryBuilder< + any, + TResult + >; + + if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + + this.config.setOperators.push({ type, isAll, rightSelect }); + return this as any; + }; + } + + /** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * import { union } from 'drizzle-orm/singlestore-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ + union = this.createSetOperator('union', false); + + /** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. 
+ * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * import { unionAll } from 'drizzle-orm/singlestore-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ + unionAll = this.createSetOperator('union', true); + + /** + * Adds `intersect` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { intersect } from 'drizzle-orm/singlestore-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + intersect = this.createSetOperator('intersect', false); + + /** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { except } from 'drizzle-orm/singlestore-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + except = this.createSetOperator('except', false); + + /** + * Adds `minus` set operator to the query. + * + * This is an alias of `except` supported by SingleStore. + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .minus( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { minus } from 'drizzle-orm/singlestore-core' + * + * await minus( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + minus = this.createSetOperator('except', false); + + /** @internal */ + addSetOperators(setOperators: SingleStoreSelectConfig['setOperators']): SingleStoreSelectWithout< + this, + TDynamic, + SingleStoreSetOperatorExcludedMethods, + true + > { + this.config.setOperators.push(...setOperators); + return this as any; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#filtering} + * + * @param where the `where` clause. 
+ * + * @example + * You can use conditional operators and `sql function` to filter the rows to be selected. + * + * ```ts + * // Select all cars with green color + * await db.select().from(cars).where(eq(cars.color, 'green')); + * // or + * await db.select().from(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Select all BMW cars with a green color + * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Select all cars with the green or blue color + * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where( + where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): SingleStoreSelectWithout { + if (typeof where === 'function') { + where = where( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.where = where; + return this as any; + } + + /** + * Adds a `having` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @param having the `having` clause. + * + * @example + * + * ```ts + * // Select all brands with more than one car + * await db.select({ + * brand: cars.brand, + * count: sql`cast(count(${cars.id}) as int)`, + * }) + * .from(cars) + * .groupBy(cars.brand) + * .having(({ count }) => gt(count, 1)); + * ``` + */ + having( + having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): SingleStoreSelectWithout { + if (typeof having === 'function') { + having = having( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.having = having; + return this as any; + } + + /** + * Adds a `group by` clause to the query. + * + * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @example + * + * ```ts + * // Group and count people by their last names + * await db.select({ + * lastName: people.lastName, + * count: sql`cast(count(*) as int)` + * }) + * .from(people) + * .groupBy(people.lastName); + * ``` + */ + groupBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): SingleStoreSelectWithout; + groupBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreSelectWithout; + groupBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (SingleStoreColumn | SQL | SQL.Aliased)[] + ): SingleStoreSelectWithout { + if (typeof columns[0] === 'function') { + const groupBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; + } else { + this.config.groupBy = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; + } + return this as any; + } + + /** + * Adds an `order by` clause to the query. + * + * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. 
+ * + * See docs: {@link https://orm.drizzle.team/docs/select#order-by} + * + * @example + * + * ``` + * // Select cars ordered by year + * await db.select().from(cars).orderBy(cars.year); + * ``` + * + * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. + * + * ```ts + * // Select cars ordered by year in descending order + * await db.select().from(cars).orderBy(desc(cars.year)); + * + * // Select cars ordered by year and price + * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); + * ``` + */ + orderBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): SingleStoreSelectWithout; + orderBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreSelectWithout; + orderBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (SingleStoreColumn | SQL | SQL.Aliased)[] + ): SingleStoreSelectWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } else { + const orderByArray = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } + return this as any; + } + + /** + * Adds a `limit` clause to the query. + * + * Calling this method will set the maximum number of rows that will be returned by this query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} + * + * @param limit the `limit` clause. + * + * @example + * + * ```ts + * // Get the first 10 people from this query. + * await db.select().from(people).limit(10); + * ``` + */ + limit(limit: number): SingleStoreSelectWithout { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.limit = limit; + } else { + this.config.limit = limit; + } + return this as any; + } + + /** + * Adds an `offset` clause to the query. + * + * Calling this method will skip a number of rows when returning results from this query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} + * + * @param offset the `offset` clause. + * + * @example + * + * ```ts + * // Get the 10th-20th people from this query. + * await db.select().from(people).offset(10).limit(10); + * ``` + */ + offset(offset: number): SingleStoreSelectWithout { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.offset = offset; + } else { + this.config.offset = offset; + } + return this as any; + } + + /** + * Adds a `for` clause to the query. + * + * Calling this method will specify a lock strength for this query that controls how strictly it acquires exclusive access to the rows being queried. + * + * @param strength the lock strength. + * @param config the lock configuration. 
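+ *
+ * @example
+ *
+ * ```ts
+ * // Minimal sketch: `users` is an assumed example table, not one defined in this patch.
+ * // Lock the matching rows for update and skip rows that are already locked.
+ * await db.select()
+ *   .from(users)
+ *   .where(eq(users.id, 1))
+ *   .for('update', { skipLocked: true });
+ * ```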
+ */ + for(strength: LockStrength, config: LockConfig = {}): SingleStoreSelectWithout { + this.config.lockingClause = { strength, config }; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildSelectQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + as( + alias: TAlias, + ): SubqueryWithSelection { + return new Proxy( + new Subquery(this.getSQL(), this.config.fields, alias), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as SubqueryWithSelection; + } + + /** @internal */ + override getSelectedFields(): this['_']['selectedFields'] { + return new Proxy( + this.config.fields, + new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as this['_']['selectedFields']; + } + + $dynamic(): SingleStoreSelectDynamic { + return this as any; + } +} + +export interface SingleStoreSelectBase< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends + SingleStoreSelectQueryBuilderBase< + SingleStoreSelectHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + QueryPromise +{} + +export class SingleStoreSelectBase< + TTableName extends string | undefined, + TSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> extends SingleStoreSelectQueryBuilderBase< + SingleStoreSelectHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields +> { + static override readonly [entityKind]: string = 'SingleStoreSelect'; + + prepare(): SingleStoreSelectPrepare { + if (!this.session) { + throw new Error('Cannot execute a query on a query builder. 
Please use a database instance instead.'); + } + const fieldsList = orderSelectedFields(this.config.fields); + const query = this.session.prepareQuery< + SingleStorePreparedQueryConfig & { execute: SelectResult[] }, + TPreparedQueryHKT + >(this.dialect.sqlToQuery(this.getSQL()), fieldsList); + query.joinsNotNullableMap = this.joinsNotNullableMap; + return query as SingleStoreSelectPrepare; + } + + execute = ((placeholderValues) => { + return this.prepare().execute(placeholderValues); + }) as ReturnType['execute']; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); +} + +applyMixins(SingleStoreSelectBase, [QueryPromise]); + +function createSetOperator(type: SetOperator, isAll: boolean): SingleStoreCreateSetOperatorFn { + return (leftSelect, rightSelect, ...restSelects) => { + const setOperators = [rightSelect, ...restSelects].map((select) => ({ + type, + isAll, + rightSelect: select as AnySingleStoreSelect, + })); + + for (const setOperator of setOperators) { + if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + } + + return (leftSelect as AnySingleStoreSelect).addSetOperators(setOperators) as any; + }; +} + +const getSingleStoreSetOperators = () => ({ + union, + unionAll, + intersect, + except, + minus, +}); + +/** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * import { union } from 'drizzle-orm/singlestore-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ +export const union = createSetOperator('union', false); + +/** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * import { unionAll } from 'drizzle-orm/singlestore-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ +export const unionAll = createSetOperator('union', true); + +/** + * Adds `intersect` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. 
+ * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * import { intersect } from 'drizzle-orm/singlestore-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const intersect = createSetOperator('intersect', false); + +/** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * import { except } from 'drizzle-orm/singlestore-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const except = createSetOperator('except', false); + +/** + * Adds `minus` set operator to the query. + * + * This is an alias of `except` supported by SingleStore. + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * import { minus } from 'drizzle-orm/singlestore-core' + * + * await minus( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .minus( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const minus = createSetOperator('except', true); diff --git a/drizzle-orm/src/singlestore-core/query-builders/select.types.ts b/drizzle-orm/src/singlestore-core/query-builders/select.types.ts new file mode 100644 index 000000000..75bc8d783 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/select.types.ts @@ -0,0 +1,457 @@ +import type { + SelectedFields as SelectedFieldsBase, + SelectedFieldsFlat as SelectedFieldsFlatBase, + SelectedFieldsOrdered as SelectedFieldsOrderedBase, +} from '~/operations.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + AppendToNullabilityMap, + AppendToResult, + BuildSubquerySelection, + GetSelectTableName, + JoinNullability, + JoinType, + MapColumnsToTableAlias, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import type { SingleStoreColumn } from '~/singlestore-core/columns/index.ts'; +import type { SingleStoreTable, SingleStoreTableWithColumns } from '~/singlestore-core/table.ts'; +import type { ColumnsSelection, Placeholder, SQL, View } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import type { Table, UpdateTableConfig } from '~/table.ts'; +import type { Assume, ValidateShape } from '~/utils.ts'; +import type { PreparedQueryHKTBase, PreparedQueryKind, SingleStorePreparedQueryConfig } from '../session.ts'; +/* import type { SingleStoreViewBase } from '../view-base.ts'; */ +/* import type { 
SingleStoreViewWithSelection } from '../view.ts'; */ +import type { SingleStoreSelectBase, SingleStoreSelectQueryBuilderBase } from './select.ts'; + +export interface SingleStoreSelectJoinConfig { + on: SQL | undefined; + table: SingleStoreTable | Subquery | SQL; // SingleStoreViewBase | + alias: string | undefined; + joinType: JoinType; + lateral?: boolean; +} + +export type BuildAliasTable = TTable extends Table + ? SingleStoreTableWithColumns< + UpdateTableConfig; + }> + > + /* : TTable extends View ? SingleStoreViewWithSelection< + TAlias, + TTable['_']['existing'], + MapColumnsToTableAlias + > */ + : never; + +export interface SingleStoreSelectConfig { + withList?: Subquery[]; + fields: Record; + fieldsFlat?: SelectedFieldsOrdered; + where?: SQL; + having?: SQL; + table: SingleStoreTable | Subquery | SQL; // | SingleStoreViewBase + limit?: number | Placeholder; + offset?: number | Placeholder; + joins?: SingleStoreSelectJoinConfig[]; + orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + groupBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + lockingClause?: { + strength: LockStrength; + config: LockConfig; + }; + distinct?: boolean; + setOperators: { + rightSelect: TypedQueryBuilder; + type: SetOperator; + isAll: boolean; + orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + limit?: number | Placeholder; + offset?: number | Placeholder; + }[]; +} + +export type SingleStoreJoin< + T extends AnySingleStoreSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, + TJoinedTable extends SingleStoreTable | Subquery | SQL, // | SingleStoreViewBase + TJoinedName extends GetSelectTableName = GetSelectTableName, +> = T extends any ? SingleStoreSelectWithout< + SingleStoreSelectKind< + T['_']['hkt'], + T['_']['tableName'], + AppendToResult< + T['_']['tableName'], + T['_']['selection'], + TJoinedName, + TJoinedTable extends SingleStoreTable ? TJoinedTable['_']['columns'] + : TJoinedTable extends Subquery ? Assume + : never, + T['_']['selectMode'] + >, + T['_']['selectMode'] extends 'partial' ? 
T['_']['selectMode'] : 'multiple', + T['_']['preparedQueryHKT'], + AppendToNullabilityMap, + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + T['_']['excludedMethods'] + > + : never; + +export type SingleStoreJoinFn< + T extends AnySingleStoreSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, +> = < + TJoinedTable extends SingleStoreTable | Subquery | SQL, // | SingleStoreViewBase + TJoinedName extends GetSelectTableName = GetSelectTableName, +>( + table: TJoinedTable, + on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, +) => SingleStoreJoin; + +export type SelectedFieldsFlat = SelectedFieldsFlatBase; + +export type SelectedFields = SelectedFieldsBase; + +export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; + +export type LockStrength = 'update' | 'share'; + +export type LockConfig = { + noWait: true; + skipLocked?: undefined; +} | { + noWait?: undefined; + skipLocked: true; +} | { + noWait?: undefined; + skipLocked?: undefined; +}; + +export interface SingleStoreSelectHKTBase { + tableName: string | undefined; + selection: unknown; + selectMode: SelectMode; + preparedQueryHKT: unknown; + nullabilityMap: unknown; + dynamic: boolean; + excludedMethods: string; + result: unknown; + selectedFields: unknown; + _type: unknown; +} + +export type SingleStoreSelectKind< + T extends SingleStoreSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record, + TDynamic extends boolean, + TExcludedMethods extends string, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> = (T & { + tableName: TTableName; + selection: TSelection; + selectMode: TSelectMode; + preparedQueryHKT: TPreparedQueryHKT; + nullabilityMap: TNullabilityMap; + dynamic: TDynamic; + excludedMethods: TExcludedMethods; + result: TResult; + selectedFields: TSelectedFields; +})['_type']; + +export interface SingleStoreSelectQueryBuilderHKT extends SingleStoreSelectHKTBase { + _type: SingleStoreSelectQueryBuilderBase< + SingleStoreSelectQueryBuilderHKT, + this['tableName'], + Assume, + this['selectMode'], + Assume, + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export interface SingleStoreSelectHKT extends SingleStoreSelectHKTBase { + _type: SingleStoreSelectBase< + this['tableName'], + Assume, + this['selectMode'], + Assume, + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export type SingleStoreSetOperatorExcludedMethods = + | 'where' + | 'having' + | 'groupBy' + | 'session' + | 'leftJoin' + | 'rightJoin' + | 'innerJoin' + | 'fullJoin' + | 'for'; + +export type SingleStoreSelectWithout< + T extends AnySingleStoreSelectQueryBuilder, + TDynamic extends boolean, + K extends keyof T & string, + TResetExcluded extends boolean = false, +> = TDynamic extends true ? T : Omit< + SingleStoreSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['preparedQueryHKT'], + T['_']['nullabilityMap'], + TDynamic, + TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, + T['_']['result'], + T['_']['selectedFields'] + >, + TResetExcluded extends true ? 
K : T['_']['excludedMethods'] | K +>; + +export type SingleStoreSelectPrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: T['_']['result']; + iterator: T['_']['result'][number]; + }, + true +>; + +export type SingleStoreSelectDynamic = SingleStoreSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['preparedQueryHKT'], + T['_']['nullabilityMap'], + true, + never, + T['_']['result'], + T['_']['selectedFields'] +>; + +export type CreateSingleStoreSelectFromBuilderMode< + TBuilderMode extends 'db' | 'qb', + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, +> = TBuilderMode extends 'db' ? SingleStoreSelectBase + : SingleStoreSelectQueryBuilderBase< + SingleStoreSelectQueryBuilderHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT + >; + +export type SingleStoreSelectQueryBuilder< + THKT extends SingleStoreSelectHKTBase = SingleStoreSelectQueryBuilderHKT, + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = ColumnsSelection, + TSelectMode extends SelectMode = SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = Record, + TResult extends any[] = unknown[], + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> = SingleStoreSelectQueryBuilderBase< + THKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + true, + never, + TResult, + TSelectedFields +>; + +export type AnySingleStoreSelectQueryBuilder = SingleStoreSelectQueryBuilderBase< + any, + any, + any, + any, + any, + any, + any, + any, + any +>; + +export type AnySingleStoreSetOperatorInterface = SingleStoreSetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + any, + any +>; + +export interface SingleStoreSetOperatorInterface< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? 
Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> { + _: { + readonly hkt: SingleStoreSelectHKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; +} + +export type SingleStoreSetOperatorWithResult = SingleStoreSetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + TResult, + any +>; + +export type SingleStoreSelect< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TNullabilityMap extends Record = Record, +> = SingleStoreSelectBase; + +export type AnySingleStoreSelect = SingleStoreSelectBase; + +export type SingleStoreSetOperator< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = Record, +> = SingleStoreSelectBase< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + true, + SingleStoreSetOperatorExcludedMethods +>; + +export type SetOperatorRightSelect< + TValue extends SingleStoreSetOperatorWithResult, + TResult extends any[], +> = TValue extends SingleStoreSetOperatorInterface + ? ValidateShape< + TValueResult[number], + TResult[number], + TypedQueryBuilder + > + : TValue; + +export type SetOperatorRestSelect< + TValue extends readonly SingleStoreSetOperatorWithResult[], + TResult extends any[], +> = TValue extends [infer First, ...infer Rest] + ? First extends SingleStoreSetOperatorInterface + ? Rest extends AnySingleStoreSetOperatorInterface[] ? [ + ValidateShape>, + ...SetOperatorRestSelect, + ] + : ValidateShape[]> + : never + : TValue; + +export type SingleStoreCreateSetOperatorFn = < + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TValue extends SingleStoreSetOperatorWithResult, + TRest extends SingleStoreSetOperatorWithResult[], + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? 
Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +>( + leftSelect: SingleStoreSetOperatorInterface< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + rightSelect: SetOperatorRightSelect, + ...restSelects: SetOperatorRestSelect +) => SingleStoreSelectWithout< + SingleStoreSelectBase< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + false, + SingleStoreSetOperatorExcludedMethods, + true +>; + +export type GetSingleStoreSetOperators = { + union: SingleStoreCreateSetOperatorFn; + intersect: SingleStoreCreateSetOperatorFn; + except: SingleStoreCreateSetOperatorFn; + unionAll: SingleStoreCreateSetOperatorFn; + minus: SingleStoreCreateSetOperatorFn; +}; diff --git a/drizzle-orm/src/singlestore-core/query-builders/update.ts b/drizzle-orm/src/singlestore-core/query-builders/update.ts new file mode 100644 index 000000000..40ca97662 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/update.ts @@ -0,0 +1,251 @@ +import type { GetColumnData } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import { Table } from '~/table.ts'; +import { mapUpdateSet, type UpdateSet, type ValueOrArray } from '~/utils.ts'; +import type { SingleStoreColumn } from '../columns/common.ts'; +import type { SelectedFieldsOrdered } from './select.types.ts'; + +export interface SingleStoreUpdateConfig { + where?: SQL | undefined; + limit?: number | Placeholder; + orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + set: UpdateSet; + table: SingleStoreTable; + returning?: SelectedFieldsOrdered; + withList?: Subquery[]; +} + +export type SingleStoreUpdateSetSource = + & { + [Key in keyof TTable['$inferInsert']]?: + | GetColumnData + | SQL; + } + & {}; + +export class SingleStoreUpdateBuilder< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, +> { + static readonly [entityKind]: string = 'SingleStoreUpdateBuilder'; + + declare readonly _: { + readonly table: TTable; + }; + + constructor( + private table: TTable, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + private withList?: Subquery[], + ) {} + + set(values: SingleStoreUpdateSetSource): SingleStoreUpdateBase { + return new SingleStoreUpdateBase( + this.table, + mapUpdateSet(this.table, values), + this.session, + this.dialect, + this.withList, + ); + } +} + +export type SingleStoreUpdateWithout< + T extends AnySingleStoreUpdateBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? 
T : Omit< + SingleStoreUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K +>; + +export type SingleStoreUpdatePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +export type SingleStoreUpdateDynamic = SingleStoreUpdate< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +export type SingleStoreUpdate< + TTable extends SingleStoreTable = SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreUpdateBase; + +export type AnySingleStoreUpdateBase = SingleStoreUpdateBase; + +export interface SingleStoreUpdateBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise>, SQLWrapper { + readonly _: { + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreUpdateBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static override readonly [entityKind]: string = 'SingleStoreUpdate'; + + private config: SingleStoreUpdateConfig; + + constructor( + table: TTable, + set: UpdateSet, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + withList?: Subquery[], + ) { + super(); + this.config = { set, table, withList }; + } + + /** + * Adds a 'where' clause to the query. + * + * Calling this method will update only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param where the 'where' clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be updated. 
+ * + * ```ts + * // Update all cars with green color + * db.update(cars).set({ color: 'red' }) + * .where(eq(cars.color, 'green')); + * // or + * db.update(cars).set({ color: 'red' }) + * .where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Update all BMW cars with a green color + * db.update(cars).set({ color: 'red' }) + * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Update all cars with the green or blue color + * db.update(cars).set({ color: 'red' }) + * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): SingleStoreUpdateWithout { + this.config.where = where; + return this as any; + } + + orderBy( + builder: (updateTable: TTable) => ValueOrArray, + ): SingleStoreUpdateWithout; + orderBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreUpdateWithout; + orderBy( + ...columns: + | [(updateTable: TTable) => ValueOrArray] + | (SingleStoreColumn | SQL | SQL.Aliased)[] + ): SingleStoreUpdateWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.table[Table.Symbol.Columns], + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as any, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + this.config.orderBy = orderByArray; + } else { + const orderByArray = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; + this.config.orderBy = orderByArray; + } + return this as any; + } + + limit(limit: number | Placeholder): SingleStoreUpdateWithout { + this.config.limit = limit; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildUpdateQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreUpdatePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + this.config.returning, + ) as SingleStoreUpdatePrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreUpdateDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/schema.ts b/drizzle-orm/src/singlestore-core/schema.ts new file mode 100644 index 000000000..ea7a53924 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/schema.ts @@ -0,0 +1,40 @@ +import { entityKind, is } from '~/entity.ts'; +import { type SingleStoreTableFn, singlestoreTableWithSchema } from './table.ts'; +/* import { type singlestoreView, singlestoreViewWithSchema } from './view.ts'; */ + +export class SingleStoreSchema { + static readonly [entityKind]: string = 'SingleStoreSchema'; + + constructor( + public readonly schemaName: TName, + ) {} + + table: SingleStoreTableFn = (name, columns, extraConfig) => { + return singlestoreTableWithSchema(name, columns, extraConfig, this.schemaName); + }; + /* + view = ((name, columns) => { + return singlestoreViewWithSchema(name, columns, this.schemaName); + }) as typeof singlestoreView; */ +} + +/** @deprecated - use `instanceof SingleStoreSchema` */ +export function 
isSingleStoreSchema(obj: unknown): obj is SingleStoreSchema { + return is(obj, SingleStoreSchema); +} + +/** + * Create a SingleStore schema. + * https://docs.singlestore.com/cloud/create-a-database/ + * + * @param name singlestore use schema name + * @returns SingleStore schema + */ +export function singlestoreDatabase(name: TName) { + return new SingleStoreSchema(name); +} + +/** + * @see singlestoreDatabase + */ +export const singlestoreSchema = singlestoreDatabase; diff --git a/drizzle-orm/src/singlestore-core/session.ts b/drizzle-orm/src/singlestore-core/session.ts new file mode 100644 index 000000000..bc31f3d97 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/session.ts @@ -0,0 +1,157 @@ +import { entityKind } from '~/entity.ts'; +import { TransactionRollbackError } from '~/errors.ts'; +import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import { type Query, type SQL, sql } from '~/sql/sql.ts'; +import type { Assume, Equal } from '~/utils.ts'; +import { SingleStoreDatabase } from './db.ts'; +import type { SingleStoreDialect } from './dialect.ts'; +import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; + +export interface SingleStoreQueryResultHKT { + readonly $brand: 'SingleStoreQueryResultHKT'; + readonly row: unknown; + readonly type: unknown; +} + +export interface AnySingleStoreQueryResultHKT extends SingleStoreQueryResultHKT { + readonly type: any; +} + +export type SingleStoreQueryResultKind = (TKind & { + readonly row: TRow; +})['type']; + +export interface SingleStorePreparedQueryConfig { + execute: unknown; + iterator: unknown; +} + +export interface SingleStorePreparedQueryHKT { + readonly $brand: 'SingleStorePreparedQueryHKT'; + readonly config: unknown; + readonly type: unknown; +} + +export type PreparedQueryKind< + TKind extends SingleStorePreparedQueryHKT, + TConfig extends SingleStorePreparedQueryConfig, + TAssume extends boolean = false, +> = Equal extends true + ? 
Assume<(TKind & { readonly config: TConfig })['type'], SingleStorePreparedQuery> + : (TKind & { readonly config: TConfig })['type']; + +export abstract class SingleStorePreparedQuery { + static readonly [entityKind]: string = 'SingleStorePreparedQuery'; + + /** @internal */ + joinsNotNullableMap?: Record; + + abstract execute(placeholderValues?: Record): Promise; + + abstract iterator(placeholderValues?: Record): AsyncGenerator; +} + +export interface SingleStoreTransactionConfig { + withConsistentSnapshot?: boolean; + accessMode?: 'read only' | 'read write'; + isolationLevel: 'read committed'; // SingleStore only supports read committed isolation level (https://docs.singlestore.com/db/v8.7/introduction/faqs/durability/) +} + +export abstract class SingleStoreSession< + TQueryResult extends SingleStoreQueryResultHKT = SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TFullSchema extends Record = Record, + TSchema extends TablesRelationalConfig = Record, +> { + static readonly [entityKind]: string = 'SingleStoreSession'; + + constructor(protected dialect: SingleStoreDialect) {} + + abstract prepareQuery< + T extends SingleStorePreparedQueryConfig, + TPreparedQueryHKT extends SingleStorePreparedQueryHKT, + >( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + generatedIds?: Record[], + returningIds?: SelectedFieldsOrdered, + ): PreparedQueryKind; + + execute(query: SQL): Promise { + return this.prepareQuery( + this.dialect.sqlToQuery(query), + undefined, + ).execute(); + } + + abstract all(query: SQL): Promise; + + async count(sql: SQL): Promise { + const res = await this.execute<[[{ count: string }]]>(sql); + + return Number( + res[0][0]['count'], + ); + } + + abstract transaction( + transaction: (tx: SingleStoreTransaction) => Promise, + config?: SingleStoreTransactionConfig, + ): Promise; + + protected getSetTransactionSQL(config: SingleStoreTransactionConfig): SQL | undefined { + const parts: string[] = []; + + if (config.isolationLevel) { + parts.push(`isolation level ${config.isolationLevel}`); + } + + return parts.length ? sql`set transaction ${sql.raw(parts.join(' '))}` : undefined; + } + + protected getStartTransactionSQL(config: SingleStoreTransactionConfig): SQL | undefined { + const parts: string[] = []; + + if (config.withConsistentSnapshot) { + parts.push('with consistent snapshot'); + } + + if (config.accessMode) { + parts.push(config.accessMode); + } + + return parts.length ? sql`start transaction ${sql.raw(parts.join(' '))}` : undefined; + } +} + +export abstract class SingleStoreTransaction< + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record = Record, + TSchema extends TablesRelationalConfig = Record, +> extends SingleStoreDatabase { + static override readonly [entityKind]: string = 'SingleStoreTransaction'; + + constructor( + dialect: SingleStoreDialect, + session: SingleStoreSession, + protected schema: RelationalSchemaConfig | undefined, + protected readonly nestedIndex: number, + ) { + super(dialect, session, schema); + } + + rollback(): never { + throw new TransactionRollbackError(); + } + + /** Nested transactions (aka savepoints) only work with InnoDB engine. 
*/ + abstract override transaction( + transaction: (tx: SingleStoreTransaction) => Promise, + ): Promise; +} + +export interface PreparedQueryHKTBase extends SingleStorePreparedQueryHKT { + type: SingleStorePreparedQuery>; +} diff --git a/drizzle-orm/src/singlestore-core/subquery.ts b/drizzle-orm/src/singlestore-core/subquery.ts new file mode 100644 index 000000000..a4605c56d --- /dev/null +++ b/drizzle-orm/src/singlestore-core/subquery.ts @@ -0,0 +1,17 @@ +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery } from '~/subquery.ts'; + +export type SubqueryWithSelection< + TSelection extends ColumnsSelection, + TAlias extends string, +> = + & Subquery> + & AddAliasToSelection; + +export type WithSubqueryWithSelection< + TSelection extends ColumnsSelection, + TAlias extends string, +> = + & WithSubquery> + & AddAliasToSelection; diff --git a/drizzle-orm/src/singlestore-core/table.ts b/drizzle-orm/src/singlestore-core/table.ts new file mode 100644 index 000000000..4cc8973ee --- /dev/null +++ b/drizzle-orm/src/singlestore-core/table.ts @@ -0,0 +1,138 @@ +import type { BuildColumns, BuildExtraConfigColumns } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import { Table, type TableConfig as TableConfigBase, type UpdateTableConfig } from '~/table.ts'; +import { getSingleStoreColumnBuilders, type SingleStoreColumnBuilders } from './columns/all.ts'; +import type { SingleStoreColumn, SingleStoreColumnBuilder, SingleStoreColumnBuilderBase } from './columns/common.ts'; +import type { AnyIndexBuilder } from './indexes.ts'; +import type { PrimaryKeyBuilder } from './primary-keys.ts'; +import type { UniqueConstraintBuilder } from './unique-constraint.ts'; + +export type SingleStoreTableExtraConfig = Record< + string, + | AnyIndexBuilder + | PrimaryKeyBuilder + | UniqueConstraintBuilder +>; + +export type TableConfig = TableConfigBase; + +export class SingleStoreTable extends Table { + static override readonly [entityKind]: string = 'SingleStoreTable'; + + declare protected $columns: T['columns']; + + /** @internal */ + static override readonly Symbol = Object.assign({}, Table.Symbol, {}); + + /** @internal */ + override [Table.Symbol.Columns]!: NonNullable; + + /** @internal */ + override [Table.Symbol.ExtraConfigBuilder]: + | ((self: Record) => SingleStoreTableExtraConfig) + | undefined = undefined; +} + +export type AnySingleStoreTable = {}> = SingleStoreTable< + UpdateTableConfig +>; + +export type SingleStoreTableWithColumns = + & SingleStoreTable + & { + [Key in keyof T['columns']]: T['columns'][Key]; + }; + +export function singlestoreTableWithSchema< + TTableName extends string, + TSchemaName extends string | undefined, + TColumnsMap extends Record, +>( + name: TTableName, + columns: TColumnsMap | ((columnTypes: SingleStoreColumnBuilders) => TColumnsMap), + extraConfig: + | ((self: BuildColumns) => SingleStoreTableExtraConfig) + | undefined, + schema: TSchemaName, + baseName = name, +): SingleStoreTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; +}> { + const rawTable = new SingleStoreTable<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; + }>(name, schema, baseName); + + const parsedColumns: TColumnsMap = typeof columns === 'function' ? 
columns(getSingleStoreColumnBuilders()) : columns; + + const builtColumns = Object.fromEntries( + Object.entries(parsedColumns).map(([name, colBuilderBase]) => { + const colBuilder = colBuilderBase as SingleStoreColumnBuilder; + colBuilder.setName(name); + const column = colBuilder.build(rawTable); + return [name, column]; + }), + ) as unknown as BuildColumns; + + const table = Object.assign(rawTable, builtColumns); + + table[Table.Symbol.Columns] = builtColumns; + table[Table.Symbol.ExtraConfigColumns] = builtColumns as unknown as BuildExtraConfigColumns< + TTableName, + TColumnsMap, + 'singlestore' + >; + + if (extraConfig) { + table[SingleStoreTable.Symbol.ExtraConfigBuilder] = extraConfig as unknown as ( + self: Record, + ) => SingleStoreTableExtraConfig; + } + + return table; +} + +export interface SingleStoreTableFn { + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: TColumnsMap, + extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfig, + ): SingleStoreTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; + }>; + + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: (columnTypes: SingleStoreColumnBuilders) => TColumnsMap, + extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfig, + ): SingleStoreTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; + }>; +} + +export const singlestoreTable: SingleStoreTableFn = (name, columns, extraConfig) => { + return singlestoreTableWithSchema(name, columns, extraConfig, undefined, name); +}; + +export function singlestoreTableCreator(customizeTableName: (name: string) => string): SingleStoreTableFn { + return (name, columns, extraConfig) => { + return singlestoreTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); + }; +} diff --git a/drizzle-orm/src/singlestore-core/unique-constraint.ts b/drizzle-orm/src/singlestore-core/unique-constraint.ts new file mode 100644 index 000000000..511e466dc --- /dev/null +++ b/drizzle-orm/src/singlestore-core/unique-constraint.ts @@ -0,0 +1,65 @@ +import { entityKind } from '~/entity.ts'; +import { TableName } from '~/table.utils.ts'; +import type { SingleStoreColumn } from './columns/index.ts'; +import type { SingleStoreTable } from './table.ts'; + +export function unique(name?: string): UniqueOnConstraintBuilder { + return new UniqueOnConstraintBuilder(name); +} + +export function uniqueKeyName(table: SingleStoreTable, columns: string[]) { + return `${table[TableName]}_${columns.join('_')}_unique`; +} + +export class UniqueConstraintBuilder { + static readonly [entityKind]: string = 'SingleStoreUniqueConstraintBuilder'; + + /** @internal */ + columns: SingleStoreColumn[]; + + constructor( + columns: SingleStoreColumn[], + private name?: string, + ) { + this.columns = columns; + } + + /** @internal */ + build(table: SingleStoreTable): UniqueConstraint { + return new UniqueConstraint(table, this.columns, this.name); + } +} + +export class UniqueOnConstraintBuilder { + static readonly [entityKind]: string = 'SingleStoreUniqueOnConstraintBuilder'; + + /** @internal */ + name?: string; + + constructor( + name?: string, + ) { + this.name = name; + } + + on(...columns: [SingleStoreColumn, ...SingleStoreColumn[]]) { + return new UniqueConstraintBuilder(columns, this.name); + } +} + +export class UniqueConstraint { + static readonly [entityKind]: 
string = 'SingleStoreUniqueConstraint'; + + readonly columns: SingleStoreColumn[]; + readonly name?: string; + readonly nullsNotDistinct: boolean = false; + + constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { + this.columns = columns; + this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); + } + + getName() { + return this.name; + } +} diff --git a/drizzle-orm/src/singlestore-core/utils.ts b/drizzle-orm/src/singlestore-core/utils.ts new file mode 100644 index 000000000..634b4e261 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/utils.ts @@ -0,0 +1,55 @@ +import { is } from '~/entity.ts'; +import { Table } from '~/table.ts'; +import type { Index } from './indexes.ts'; +import { IndexBuilder } from './indexes.ts'; +import type { PrimaryKey } from './primary-keys.ts'; +import { PrimaryKeyBuilder } from './primary-keys.ts'; +import { SingleStoreTable } from './table.ts'; +import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; +/* import { SingleStoreViewConfig } from './view-common.ts'; +import type { SingleStoreView } from './view.ts'; */ + +export function getTableConfig(table: SingleStoreTable) { + const columns = Object.values(table[SingleStoreTable.Symbol.Columns]); + const indexes: Index[] = []; + const primaryKeys: PrimaryKey[] = []; + const uniqueConstraints: UniqueConstraint[] = []; + const name = table[Table.Symbol.Name]; + const schema = table[Table.Symbol.Schema]; + const baseName = table[Table.Symbol.BaseName]; + + const extraConfigBuilder = table[SingleStoreTable.Symbol.ExtraConfigBuilder]; + + if (extraConfigBuilder !== undefined) { + const extraConfig = extraConfigBuilder(table[SingleStoreTable.Symbol.Columns]); + for (const builder of Object.values(extraConfig)) { + if (is(builder, IndexBuilder)) { + indexes.push(builder.build(table)); + } else if (is(builder, UniqueConstraintBuilder)) { + uniqueConstraints.push(builder.build(table)); + } else if (is(builder, PrimaryKeyBuilder)) { + primaryKeys.push(builder.build(table)); + } + } + } + + return { + columns, + indexes, + primaryKeys, + uniqueConstraints, + name, + schema, + baseName, + }; +} + +/* export function getViewConfig< + TName extends string = string, + TExisting extends boolean = boolean, +>(view: SingleStoreView) { + return { + ...view[ViewBaseConfig], + ...view[SingleStoreViewConfig], + }; +} */ diff --git a/drizzle-orm/src/singlestore-core/view-base.ts b/drizzle-orm/src/singlestore-core/view-base.ts new file mode 100644 index 000000000..1ad8d62d5 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/view-base.ts @@ -0,0 +1,15 @@ +import { entityKind } from '~/entity.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import { View } from '~/sql/sql.ts'; + +export abstract class SingleStoreViewBase< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends View { + static override readonly [entityKind]: string = 'SingleStoreViewBase'; + + declare readonly _: View['_'] & { + readonly viewBrand: 'SingleStoreViewBase'; + }; +} diff --git a/drizzle-orm/src/singlestore-core/view-common.ts b/drizzle-orm/src/singlestore-core/view-common.ts new file mode 100644 index 000000000..d29c3d5ad --- /dev/null +++ b/drizzle-orm/src/singlestore-core/view-common.ts @@ -0,0 +1 @@ +export const SingleStoreViewConfig = Symbol.for('drizzle:SingleStoreViewConfig'); diff --git a/drizzle-orm/src/singlestore-core/view.ts 
b/drizzle-orm/src/singlestore-core/view.ts new file mode 100644 index 000000000..58f111428 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/view.ts @@ -0,0 +1,209 @@ +import type { BuildColumns } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import { getTableColumns } from '~/utils.ts'; +import type { SingleStoreColumn, SingleStoreColumnBuilderBase } from './columns/index.ts'; +import { QueryBuilder } from './query-builders/query-builder.ts'; +import type { SelectedFields } from './query-builders/select.types.ts'; +import { singlestoreTable } from './table.ts'; +import { SingleStoreViewBase } from './view-base.ts'; +import { SingleStoreViewConfig } from './view-common.ts'; + +export interface ViewBuilderConfig { + algorithm?: 'undefined' | 'merge' | 'temptable'; + definer?: string; + sqlSecurity?: 'definer' | 'invoker'; + withCheckOption?: 'cascaded' | 'local'; +} + +export class ViewBuilderCore { + static readonly [entityKind]: string = 'SingleStoreViewBuilder'; + + declare readonly _: { + readonly name: TConfig['name']; + readonly columns: TConfig['columns']; + }; + + constructor( + protected name: TConfig['name'], + protected schema: string | undefined, + ) {} + + protected config: ViewBuilderConfig = {}; + + algorithm( + algorithm: Exclude, + ): this { + this.config.algorithm = algorithm; + return this; + } + + definer( + definer: Exclude, + ): this { + this.config.definer = definer; + return this; + } + + sqlSecurity( + sqlSecurity: Exclude, + ): this { + this.config.sqlSecurity = sqlSecurity; + return this; + } + + withCheckOption( + withCheckOption?: Exclude, + ): this { + this.config.withCheckOption = withCheckOption ?? 
'cascaded'; + return this; + } +} + +export class ViewBuilder extends ViewBuilderCore<{ name: TName }> { + static override readonly [entityKind]: string = 'SingleStoreViewBuilder'; + + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): SingleStoreViewWithSelection> { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder()); + } + const selectionProxy = new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }); + const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); + return new Proxy( + new SingleStoreView({ + singlestoreConfig: this.config, + config: { + name: this.name, + schema: this.schema, + selectedFields: aliasedSelection, + query: qb.getSQL().inlineParams(), + }, + }), + selectionProxy as any, + ) as SingleStoreViewWithSelection>; + } +} + +export class ManualViewBuilder< + TName extends string = string, + TColumns extends Record = Record, +> extends ViewBuilderCore<{ name: TName; columns: TColumns }> { + static override readonly [entityKind]: string = 'SingleStoreManualViewBuilder'; + + private columns: Record; + + constructor( + name: TName, + columns: TColumns, + schema: string | undefined, + ) { + super(name, schema); + this.columns = getTableColumns(singlestoreTable(name, columns)) as BuildColumns; + } + + existing(): SingleStoreViewWithSelection> { + return new Proxy( + new SingleStoreView({ + singlestoreConfig: undefined, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: undefined, + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as SingleStoreViewWithSelection>; + } + + as(query: SQL): SingleStoreViewWithSelection> { + return new Proxy( + new SingleStoreView({ + singlestoreConfig: this.config, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: query.inlineParams(), + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as SingleStoreViewWithSelection>; + } +} + +export class SingleStoreView< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends SingleStoreViewBase { + static override readonly [entityKind]: string = 'SingleStoreView'; + + declare protected $SingleStoreViewBrand: 'SingleStoreView'; + + [SingleStoreViewConfig]: ViewBuilderConfig | undefined; + + constructor({ singlestoreConfig, config }: { + singlestoreConfig: ViewBuilderConfig | undefined; + config: { + name: TName; + schema: string | undefined; + selectedFields: SelectedFields; + query: SQL | undefined; + }; + }) { + super(config); + this[SingleStoreViewConfig] = singlestoreConfig; + } +} + +export type SingleStoreViewWithSelection< + TName extends string, + TExisting extends boolean, + TSelectedFields extends ColumnsSelection, +> = SingleStoreView & TSelectedFields; + +// TODO: needs to be implemented differently compared to MySQL. 
+// /** @internal */ +// export function singlestoreViewWithSchema( +// name: string, +// selection: Record | undefined, +// schema: string | undefined, +// ): ViewBuilder | ManualViewBuilder { +// if (selection) { +// return new ManualViewBuilder(name, selection, schema); +// } +// return new ViewBuilder(name, schema); +// } + +// export function singlestoreView(name: TName): ViewBuilder; +// export function singlestoreView>( +// name: TName, +// columns: TColumns, +// ): ManualViewBuilder; +// export function singlestoreView( +// name: string, +// selection?: Record, +// ): ViewBuilder | ManualViewBuilder { +// return singlestoreViewWithSchema(name, selection, undefined); +// } diff --git a/drizzle-orm/src/singlestore-proxy/driver.ts b/drizzle-orm/src/singlestore-proxy/driver.ts new file mode 100644 index 000000000..ea24ae2d8 --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/driver.ts @@ -0,0 +1,59 @@ +import { entityKind } from '~/entity.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { + createTableRelationsHelpers, + extractTablesRelationalConfig, + type RelationalSchemaConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import { SingleStoreDatabase } from '~/singlestore-core/db.ts'; +import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import { + type SingleStoreRemotePreparedQueryHKT, + type SingleStoreRemoteQueryResultHKT, + SingleStoreRemoteSession, +} from './session.ts'; + +export class SingleStoreRemoteDatabase< + TSchema extends Record = Record, +> extends SingleStoreDatabase { + static override readonly [entityKind]: string = 'SingleStoreRemoteDatabase'; +} + +export type RemoteCallback = ( + sql: string, + params: any[], + method: 'all' | 'execute', +) => Promise<{ rows: any[]; insertId?: number; affectedRows?: number }>; + +export function drizzle = Record>( + callback: RemoteCallback, + config: DrizzleConfig = {}, +): SingleStoreRemoteDatabase { + const dialect = new SingleStoreDialect({ casing: config.casing }); + let logger; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + + let schema: RelationalSchemaConfig | undefined; + if (config.schema) { + const tablesConfig = extractTablesRelationalConfig( + config.schema, + createTableRelationsHelpers, + ); + schema = { + fullSchema: config.schema, + schema: tablesConfig.tables, + tableNamesMap: tablesConfig.tableNamesMap, + }; + } + + const session = new SingleStoreRemoteSession(callback, dialect, schema, { logger }); + return new SingleStoreRemoteDatabase(dialect, session, schema as any) as SingleStoreRemoteDatabase< + TSchema + >; +} diff --git a/drizzle-orm/src/singlestore-proxy/index.ts b/drizzle-orm/src/singlestore-proxy/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/singlestore-proxy/migrator.ts b/drizzle-orm/src/singlestore-proxy/migrator.ts new file mode 100644 index 000000000..2ed0172fb --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/migrator.ts @@ -0,0 +1,52 @@ +import type { MigrationConfig } from '~/migrator.ts'; +import { readMigrationFiles } from '~/migrator.ts'; +import { sql } from '~/sql/sql.ts'; +import type { SingleStoreRemoteDatabase } from './driver.ts'; + +export type ProxyMigrator = (migrationQueries: string[]) => Promise; + +export async 
function migrate>( + db: SingleStoreRemoteDatabase, + callback: ProxyMigrator, + config: MigrationConfig, +) { + const migrations = readMigrationFiles(config); + + const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; + const migrationTableCreate = sql` + create table if not exists ${sql.identifier(migrationsTable)} ( + id serial primary key, + hash text not null, + created_at bigint + ) + `; + await db.execute(migrationTableCreate); + + const dbMigrations = await db.select({ + id: sql.raw('id'), + hash: sql.raw('hash'), + created_at: sql.raw('created_at'), + }).from(sql.identifier(migrationsTable).getSQL()).orderBy( + sql.raw('created_at desc'), + ).limit(1); + + const lastDbMigration = dbMigrations[0]; + + const queriesToRun: string[] = []; + + for (const migration of migrations) { + if ( + !lastDbMigration + || Number(lastDbMigration.created_at) < migration.folderMillis + ) { + queriesToRun.push( + ...migration.sql, + `insert into ${ + sql.identifier(migrationsTable).value + } (\`hash\`, \`created_at\`) values('${migration.hash}', '${migration.folderMillis}')`, + ); + } + } + + await callback(queriesToRun); +} diff --git a/drizzle-orm/src/singlestore-proxy/session.ts b/drizzle-orm/src/singlestore-proxy/session.ts new file mode 100644 index 000000000..42cc8ecde --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/session.ts @@ -0,0 +1,178 @@ +import type { FieldPacket, ResultSetHeader } from 'mysql2/promise'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { NoopLogger } from '~/logger.ts'; +import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import { SingleStoreTransaction } from '~/singlestore-core/index.ts'; +import type { SelectedFieldsOrdered } from '~/singlestore-core/query-builders/select.types.ts'; +import type { + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStorePreparedQueryHKT, + SingleStoreQueryResultHKT, + SingleStoreTransactionConfig, +} from '~/singlestore-core/session.ts'; +import { SingleStorePreparedQuery as PreparedQueryBase, SingleStoreSession } from '~/singlestore-core/session.ts'; +import type { Query, SQL } from '~/sql/sql.ts'; +import { fillPlaceholders } from '~/sql/sql.ts'; +import { type Assume, mapResultRow } from '~/utils.ts'; +import type { RemoteCallback } from './driver.ts'; + +export type SingleStoreRawQueryResult = [ResultSetHeader, FieldPacket[]]; + +export interface SingleStoreRemoteSessionOptions { + logger?: Logger; +} + +export class SingleStoreRemoteSession< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreSession { + static override readonly [entityKind]: string = 'SingleStoreRemoteSession'; + + private logger: Logger; + + constructor( + private client: RemoteCallback, + dialect: SingleStoreDialect, + private schema: RelationalSchemaConfig | undefined, + options: SingleStoreRemoteSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? 
new NoopLogger(); + } + + prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + generatedIds?: Record[], + returningIds?: SelectedFieldsOrdered, + ): PreparedQueryKind { + return new PreparedQuery( + this.client, + query.sql, + query.params, + this.logger, + fields, + customResultMapper, + generatedIds, + returningIds, + ) as PreparedQueryKind; + } + + override all(query: SQL): Promise { + const querySql = this.dialect.sqlToQuery(query); + this.logger.logQuery(querySql.sql, querySql.params); + return this.client(querySql.sql, querySql.params, 'all').then(({ rows }) => rows) as Promise; + } + + override async transaction( + _transaction: (tx: SingleStoreProxyTransaction) => Promise, + _config?: SingleStoreTransactionConfig, + ): Promise { + throw new Error('Transactions are not supported by the SingleStore Proxy driver'); + } +} + +export class SingleStoreProxyTransaction< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreTransaction< + SingleStoreRemoteQueryResultHKT, + SingleStoreRemotePreparedQueryHKT, + TFullSchema, + TSchema +> { + static override readonly [entityKind]: string = 'SingleStoreProxyTransaction'; + + override async transaction( + _transaction: (tx: SingleStoreProxyTransaction) => Promise, + ): Promise { + throw new Error('Transactions are not supported by the SingleStore Proxy driver'); + } +} + +export class PreparedQuery extends PreparedQueryBase { + static override readonly [entityKind]: string = 'SingleStoreProxyPreparedQuery'; + + constructor( + private client: RemoteCallback, + private queryString: string, + private params: unknown[], + private logger: Logger, + private fields: SelectedFieldsOrdered | undefined, + private customResultMapper?: (rows: unknown[][]) => T['execute'], + // Keys that were used in $default and the value that was generated for them + private generatedIds?: Record[], + // Keys that should be returned, it has the column with all properries + key from object + private returningIds?: SelectedFieldsOrdered, + ) { + super(); + } + + async execute(placeholderValues: Record | undefined = {}): Promise { + const params = fillPlaceholders(this.params, placeholderValues); + + const { fields, client, queryString, logger, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = + this; + + logger.logQuery(queryString, params); + + if (!fields && !customResultMapper) { + const { rows: data } = await client(queryString, params, 'execute'); + + const insertId = data[0].insertId as number; + const affectedRows = data[0].affectedRows; + + if (returningIds) { + const returningResponse = []; + let j = 0; + for (let i = insertId; i < insertId + affectedRows; i++) { + for (const column of returningIds) { + const key = returningIds[0]!.path[0]!; + if (is(column.field, Column)) { + // @ts-ignore + if (column.field.primary && column.field.autoIncrement) { + returningResponse.push({ [key]: i }); + } + if (column.field.defaultFn && generatedIds) { + // generatedIds[rowIdx][key] + returningResponse.push({ [key]: generatedIds[j]![key] }); + } + } + } + j++; + } + + return returningResponse; + } + + return data; + } + + const { rows } = await client(queryString, params, 'all'); + + if (customResultMapper) { + return customResultMapper(rows); + } + + return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); + } + + override iterator( + _placeholderValues: Record = {}, + ): AsyncGenerator { + throw new Error('Streaming 
is not supported by the SingleStore Proxy driver'); + } +} + +export interface SingleStoreRemoteQueryResultHKT extends SingleStoreQueryResultHKT { + type: SingleStoreRawQueryResult; +} + +export interface SingleStoreRemotePreparedQueryHKT extends SingleStorePreparedQueryHKT { + type: PreparedQuery>; +} diff --git a/drizzle-orm/src/singlestore/driver.ts b/drizzle-orm/src/singlestore/driver.ts new file mode 100644 index 000000000..ba294f6dc --- /dev/null +++ b/drizzle-orm/src/singlestore/driver.ts @@ -0,0 +1,168 @@ +import { type Connection as CallbackConnection, createPool, type Pool as CallbackPool, type PoolOptions } from 'mysql2'; +import type { Connection, Pool } from 'mysql2/promise'; +import { entityKind } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { + createTableRelationsHelpers, + extractTablesRelationalConfig, + type RelationalSchemaConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import { SingleStoreDatabase } from '~/singlestore-core/db.ts'; +import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import { type DrizzleConfig, type IfNotImported, type ImportTypeError, isConfig } from '~/utils.ts'; +import type { + SingleStoreDriverClient, + SingleStoreDriverPreparedQueryHKT, + SingleStoreDriverQueryResultHKT, +} from './session.ts'; +import { SingleStoreDriverSession } from './session.ts'; + +export interface SingleStoreDriverOptions { + logger?: Logger; +} + +export class SingleStoreDriverDriver { + static readonly [entityKind]: string = 'SingleStoreDriverDriver'; + + constructor( + private client: SingleStoreDriverClient, + private dialect: SingleStoreDialect, + private options: SingleStoreDriverOptions = {}, + ) { + } + + createSession( + schema: RelationalSchemaConfig | undefined, + ): SingleStoreDriverSession, TablesRelationalConfig> { + return new SingleStoreDriverSession(this.client, this.dialect, schema, { logger: this.options.logger }); + } +} + +export { SingleStoreDatabase } from '~/singlestore-core/db.ts'; + +export class SingleStoreDriverDatabase< + TSchema extends Record = Record, +> extends SingleStoreDatabase { + static override readonly [entityKind]: string = 'SingleStoreDriverDatabase'; +} + +export type SingleStoreDriverDrizzleConfig = Record> = + & Omit, 'schema'> + & ({ schema: TSchema } | { schema?: undefined }); + +function construct< + TSchema extends Record = Record, + TClient extends Pool | Connection | CallbackPool | CallbackConnection = CallbackPool, +>( + client: TClient, + config: SingleStoreDriverDrizzleConfig = {}, +): SingleStoreDriverDatabase & { + $client: TClient; +} { + const dialect = new SingleStoreDialect({ casing: config.casing }); + let logger; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + + const clientForInstance = isCallbackClient(client) ? 
client.promise() : client; + + let schema: RelationalSchemaConfig | undefined; + if (config.schema) { + const tablesConfig = extractTablesRelationalConfig( + config.schema, + createTableRelationsHelpers, + ); + schema = { + fullSchema: config.schema, + schema: tablesConfig.tables, + tableNamesMap: tablesConfig.tableNamesMap, + }; + } + + const driver = new SingleStoreDriverDriver(clientForInstance as SingleStoreDriverClient, dialect, { logger }); + const session = driver.createSession(schema); + const db = new SingleStoreDriverDatabase(dialect, session, schema as any) as SingleStoreDriverDatabase; + ( db).$client = client; + + return db as any; +} + +interface CallbackClient { + promise(): SingleStoreDriverClient; +} + +function isCallbackClient(client: any): client is CallbackClient { + return typeof client.promise === 'function'; +} + +export type AnySingleStoreDriverConnection = Pool | Connection | CallbackPool | CallbackConnection; + +export function drizzle< + TSchema extends Record = Record, + TClient extends AnySingleStoreDriverConnection = CallbackPool, +>( + ...params: IfNotImported< + CallbackPool, + [ImportTypeError<'singlestore'>], + [ + TClient | string, + ] | [ + TClient | string, + SingleStoreDriverDrizzleConfig, + ] | [ + ( + & SingleStoreDriverDrizzleConfig + & ({ + connection: string | PoolOptions; + } | { + client: TClient; + }) + ), + ] + > +): SingleStoreDriverDatabase & { + $client: TClient; +} { + if (typeof params[0] === 'string') { + const connectionString = params[0]!; + const instance = createPool({ + uri: connectionString, + }); + + return construct(instance, params[1]) as any; + } + + if (isConfig(params[0])) { + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: PoolOptions | string; client?: TClient } + & SingleStoreDriverDrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + const instance = typeof connection === 'string' + ? 
createPool({ + uri: connection, + }) + : createPool(connection!); + const db = construct(instance, drizzleConfig); + + return db as any; + } + + return construct(params[0] as TClient, params[1] as SingleStoreDriverDrizzleConfig | undefined) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: SingleStoreDriverDrizzleConfig, + ): SingleStoreDriverDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/singlestore/index.ts b/drizzle-orm/src/singlestore/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/singlestore/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/singlestore/migrator.ts b/drizzle-orm/src/singlestore/migrator.ts new file mode 100644 index 000000000..6f342c0c5 --- /dev/null +++ b/drizzle-orm/src/singlestore/migrator.ts @@ -0,0 +1,11 @@ +import type { MigrationConfig } from '~/migrator.ts'; +import { readMigrationFiles } from '~/migrator.ts'; +import type { SingleStoreDriverDatabase } from './driver.ts'; + +export async function migrate>( + db: SingleStoreDriverDatabase, + config: MigrationConfig, +) { + const migrations = readMigrationFiles(config); + await db.dialect.migrate(migrations, db.session, config); +} diff --git a/drizzle-orm/src/singlestore/session.ts b/drizzle-orm/src/singlestore/session.ts new file mode 100644 index 000000000..fd70a1d52 --- /dev/null +++ b/drizzle-orm/src/singlestore/session.ts @@ -0,0 +1,340 @@ +import type { Connection as CallbackConnection } from 'mysql2'; +import type { + Connection, + FieldPacket, + OkPacket, + Pool, + PoolConnection, + QueryOptions, + ResultSetHeader, + RowDataPacket, +} from 'mysql2/promise'; +import { once } from 'node:events'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { NoopLogger } from '~/logger.ts'; +import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { SelectedFieldsOrdered } from '~/singlestore-core/query-builders/select.types.ts'; +import { + type PreparedQueryKind, + SingleStorePreparedQuery, + type SingleStorePreparedQueryConfig, + type SingleStorePreparedQueryHKT, + type SingleStoreQueryResultHKT, + SingleStoreSession, + SingleStoreTransaction, + type SingleStoreTransactionConfig, +} from '~/singlestore-core/session.ts'; +import type { Query, SQL } from '~/sql/sql.ts'; +import { fillPlaceholders, sql } from '~/sql/sql.ts'; +import { type Assume, mapResultRow } from '~/utils.ts'; + +export type SingleStoreDriverClient = Pool | Connection; + +export type SingleStoreRawQueryResult = [ResultSetHeader, FieldPacket[]]; +export type SingleStoreQueryResultType = RowDataPacket[][] | RowDataPacket[] | OkPacket | OkPacket[] | ResultSetHeader; +export type SingleStoreQueryResult< + T = any, +> = [T extends ResultSetHeader ? 
T : T[], FieldPacket[]]; + +export class SingleStoreDriverPreparedQuery + extends SingleStorePreparedQuery +{ + static override readonly [entityKind]: string = 'SingleStoreDriverPreparedQuery'; + + private rawQuery: QueryOptions; + private query: QueryOptions; + + constructor( + private client: SingleStoreDriverClient, + queryString: string, + private params: unknown[], + private logger: Logger, + private fields: SelectedFieldsOrdered | undefined, + private customResultMapper?: (rows: unknown[][]) => T['execute'], + // Keys that were used in $default and the value that was generated for them + private generatedIds?: Record[], + // Keys that should be returned, it has the column with all properries + key from object + private returningIds?: SelectedFieldsOrdered, + ) { + super(); + this.rawQuery = { + sql: queryString, + // rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }; + this.query = { + sql: queryString, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }; + } + + async execute(placeholderValues: Record = {}): Promise { + const params = fillPlaceholders(this.params, placeholderValues); + + this.logger.logQuery(this.rawQuery.sql, params); + + const { fields, client, rawQuery, query, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = + this; + if (!fields && !customResultMapper) { + const res = await client.query(rawQuery, params); + const insertId = res[0].insertId; + const affectedRows = res[0].affectedRows; + // for each row, I need to check keys from + if (returningIds) { + const returningResponse = []; + let j = 0; + for (let i = insertId; i < insertId + affectedRows; i++) { + for (const column of returningIds) { + const key = returningIds[0]!.path[0]!; + if (is(column.field, Column)) { + // @ts-ignore + if (column.field.primary && column.field.autoIncrement) { + returningResponse.push({ [key]: i }); + } + if (column.field.defaultFn && generatedIds) { + // generatedIds[rowIdx][key] + returningResponse.push({ [key]: generatedIds[j]![key] }); + } + } + } + j++; + } + + return returningResponse; + } + return res; + } + + const result = await client.query(query, params); + const rows = result[0]; + + if (customResultMapper) { + return customResultMapper(rows); + } + + return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); + } + + async *iterator( + placeholderValues: Record = {}, + ): AsyncGenerator { + const params = fillPlaceholders(this.params, placeholderValues); + const conn = ((isPool(this.client) ? await this.client.getConnection() : this.client) as {} as { + connection: CallbackConnection; + }).connection; + + const { fields, query, rawQuery, joinsNotNullableMap, client, customResultMapper } = this; + const hasRowsMapper = Boolean(fields || customResultMapper); + const driverQuery = hasRowsMapper ? 
conn.query(query, params) : conn.query(rawQuery, params); + + const stream = driverQuery.stream(); + + function dataListener() { + stream.pause(); + } + + stream.on('data', dataListener); + + try { + const onEnd = once(stream, 'end'); + const onError = once(stream, 'error'); + + while (true) { + stream.resume(); + const row = await Promise.race([onEnd, onError, new Promise((resolve) => stream.once('data', resolve))]); + if (row === undefined || (Array.isArray(row) && row.length === 0)) { + break; + } else if (row instanceof Error) { // eslint-disable-line no-instanceof/no-instanceof + throw row; + } else { + if (hasRowsMapper) { + if (customResultMapper) { + const mappedRow = customResultMapper([row as unknown[]]); + yield (Array.isArray(mappedRow) ? mappedRow[0] : mappedRow); + } else { + yield mapResultRow(fields!, row as unknown[], joinsNotNullableMap); + } + } else { + yield row as T['execute']; + } + } + } + } finally { + stream.off('data', dataListener); + if (isPool(client)) { + conn.end(); + } + } + } +} + +export interface SingleStoreDriverSessionOptions { + logger?: Logger; +} + +export class SingleStoreDriverSession< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreSession { + static override readonly [entityKind]: string = 'SingleStoreDriverSession'; + + private logger: Logger; + + constructor( + private client: SingleStoreDriverClient, + dialect: SingleStoreDialect, + private schema: RelationalSchemaConfig | undefined, + private options: SingleStoreDriverSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? new NoopLogger(); + } + + prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + generatedIds?: Record[], + returningIds?: SelectedFieldsOrdered, + ): PreparedQueryKind { + // Add returningId fields + // Each driver gets them from response from database + return new SingleStoreDriverPreparedQuery( + this.client, + query.sql, + query.params, + this.logger, + fields, + customResultMapper, + generatedIds, + returningIds, + ) as PreparedQueryKind; + } + + /** + * @internal + * What is its purpose? + */ + async query(query: string, params: unknown[]): Promise { + this.logger.logQuery(query, params); + const result = await this.client.query({ + sql: query, + values: params, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + return result; + } + + override all(query: SQL): Promise { + const querySql = this.dialect.sqlToQuery(query); + this.logger.logQuery(querySql.sql, querySql.params); + return this.client.execute(querySql.sql, querySql.params).then((result) => result[0]) as Promise; + } + + override async transaction( + transaction: (tx: SingleStoreDriverTransaction) => Promise, + config?: SingleStoreTransactionConfig, + ): Promise { + const session = isPool(this.client) + ? 
new SingleStoreDriverSession( + await this.client.getConnection(), + this.dialect, + this.schema, + this.options, + ) + : this; + const tx = new SingleStoreDriverTransaction( + this.dialect, + session as SingleStoreSession, + this.schema, + 0, + ); + if (config) { + const setTransactionConfigSql = this.getSetTransactionSQL(config); + if (setTransactionConfigSql) { + await tx.execute(setTransactionConfigSql); + } + const startTransactionSql = this.getStartTransactionSQL(config); + await (startTransactionSql ? tx.execute(startTransactionSql) : tx.execute(sql`begin`)); + } else { + await tx.execute(sql`begin`); + } + try { + const result = await transaction(tx); + await tx.execute(sql`commit`); + return result; + } catch (err) { + await tx.execute(sql`rollback`); + throw err; + } finally { + if (isPool(this.client)) { + (session.client as PoolConnection).release(); + } + } + } +} + +export class SingleStoreDriverTransaction< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreTransaction< + SingleStoreDriverQueryResultHKT, + SingleStoreDriverPreparedQueryHKT, + TFullSchema, + TSchema +> { + static override readonly [entityKind]: string = 'SingleStoreDriverTransaction'; + + override async transaction( + transaction: (tx: SingleStoreDriverTransaction) => Promise, + ): Promise { + const savepointName = `sp${this.nestedIndex + 1}`; + const tx = new SingleStoreDriverTransaction( + this.dialect, + this.session, + this.schema, + this.nestedIndex + 1, + ); + await tx.execute(sql.raw(`savepoint ${savepointName}`)); + try { + const result = await transaction(tx); + await tx.execute(sql.raw(`release savepoint ${savepointName}`)); + return result; + } catch (err) { + await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); + throw err; + } + } +} + +function isPool(client: SingleStoreDriverClient): client is Pool { + return 'getConnection' in client; +} + +export interface SingleStoreDriverQueryResultHKT extends SingleStoreQueryResultHKT { + type: SingleStoreRawQueryResult; +} + +export interface SingleStoreDriverPreparedQueryHKT extends SingleStorePreparedQueryHKT { + type: SingleStoreDriverPreparedQuery>; +} diff --git a/drizzle-orm/type-tests/singlestore/1000columns.ts b/drizzle-orm/type-tests/singlestore/1000columns.ts new file mode 100644 index 000000000..f84640858 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/1000columns.ts @@ -0,0 +1,904 @@ +import { bigint, double, singlestoreTable, varchar } from '~/singlestore-core/index.ts'; + +singlestoreTable('test', { + col0: double('col1').primaryKey().autoincrement().default(0), + col1: double('col1').primaryKey().autoincrement().default(0), + col2: double('col1').primaryKey().autoincrement().default(0), + col3: double('col1').primaryKey().autoincrement().default(0), + col4: double('col1').primaryKey().autoincrement().default(0), + col5: double('col1').primaryKey().autoincrement().default(0), + col6: double('col1').primaryKey().autoincrement().default(0), + col8: double('col1').primaryKey().autoincrement().default(0), + col9: double('col1').primaryKey().autoincrement().default(0), + col10: double('col1').primaryKey().autoincrement().default(0), + col11: double('col1').primaryKey().autoincrement().default(0), + col12: double('col1').primaryKey().autoincrement().default(0), + col13: double('col1').primaryKey().autoincrement().default(0), + col14: double('col1').primaryKey().autoincrement().default(0), + col15: double('col1').primaryKey().autoincrement().default(0), + col16: 
double('col1').primaryKey().autoincrement().default(0), + col18: double('col1').primaryKey().autoincrement().default(0), + col19: double('col1').primaryKey().autoincrement().default(0), + col20: double('col1').primaryKey().autoincrement().default(0), + col21: double('col1').primaryKey().autoincrement().default(0), + col22: double('col1').primaryKey().autoincrement().default(0), + col23: double('col1').primaryKey().autoincrement().default(0), + col24: double('col1').primaryKey().autoincrement().default(0), + col25: double('col1').primaryKey().autoincrement().default(0), + col26: double('col1').primaryKey().autoincrement().default(0), + col28: double('col1').primaryKey().autoincrement().default(0), + col29: double('col1').primaryKey().autoincrement().default(0), + col30: double('col1').primaryKey().autoincrement().default(0), + col31: double('col1').primaryKey().autoincrement().default(0), + col32: double('col1').primaryKey().autoincrement().default(0), + col33: double('col1').primaryKey().autoincrement().default(0), + col34: double('col1').primaryKey().autoincrement().default(0), + col35: double('col1').primaryKey().autoincrement().default(0), + col36: double('col1').primaryKey().autoincrement().default(0), + col38: double('col1').primaryKey().autoincrement().default(0), + col39: double('col1').primaryKey().autoincrement().default(0), + col40: double('col1').primaryKey().autoincrement().default(0), + col41: double('col1').primaryKey().autoincrement().default(0), + col42: double('col1').primaryKey().autoincrement().default(0), + col43: double('col1').primaryKey().autoincrement().default(0), + col44: double('col1').primaryKey().autoincrement().default(0), + col45: double('col1').primaryKey().autoincrement().default(0), + col46: double('col1').primaryKey().autoincrement().default(0), + col48: double('col1').primaryKey().autoincrement().default(0), + col49: double('col1').primaryKey().autoincrement().default(0), + col50: double('col1').primaryKey().autoincrement().default(0), + col51: double('col1').primaryKey().autoincrement().default(0), + col52: double('col1').primaryKey().autoincrement().default(0), + col53: double('col1').primaryKey().autoincrement().default(0), + col54: double('col1').primaryKey().autoincrement().default(0), + col55: double('col1').primaryKey().autoincrement().default(0), + col56: double('col1').primaryKey().autoincrement().default(0), + col58: double('col1').primaryKey().autoincrement().default(0), + col59: double('col1').primaryKey().autoincrement().default(0), + col60: double('col1').primaryKey().autoincrement().default(0), + col61: double('col1').primaryKey().autoincrement().default(0), + col62: double('col1').primaryKey().autoincrement().default(0), + col63: double('col1').primaryKey().autoincrement().default(0), + col64: double('col1').primaryKey().autoincrement().default(0), + col65: double('col1').primaryKey().autoincrement().default(0), + col66: double('col1').primaryKey().autoincrement().default(0), + col68: double('col1').primaryKey().autoincrement().default(0), + col69: double('col1').primaryKey().autoincrement().default(0), + col70: double('col1').primaryKey().autoincrement().default(0), + col71: double('col1').primaryKey().autoincrement().default(0), + col72: double('col1').primaryKey().autoincrement().default(0), + col73: double('col1').primaryKey().autoincrement().default(0), + col74: double('col1').primaryKey().autoincrement().default(0), + col75: double('col1').primaryKey().autoincrement().default(0), + col76: 
double('col1').primaryKey().autoincrement().default(0), + col78: double('col1').primaryKey().autoincrement().default(0), + col79: double('col1').primaryKey().autoincrement().default(0), + col80: double('col1').primaryKey().autoincrement().default(0), + col81: double('col1').primaryKey().autoincrement().default(0), + col82: double('col1').primaryKey().autoincrement().default(0), + col83: double('col1').primaryKey().autoincrement().default(0), + col84: double('col1').primaryKey().autoincrement().default(0), + col85: double('col1').primaryKey().autoincrement().default(0), + col86: double('col1').primaryKey().autoincrement().default(0), + col88: double('col1').primaryKey().autoincrement().default(0), + col89: double('col1').primaryKey().autoincrement().default(0), + col90: double('col1').primaryKey().autoincrement().default(0), + col91: double('col1').primaryKey().autoincrement().default(0), + col92: double('col1').primaryKey().autoincrement().default(0), + col93: double('col1').primaryKey().autoincrement().default(0), + col94: double('col1').primaryKey().autoincrement().default(0), + col95: double('col1').primaryKey().autoincrement().default(0), + col96: double('col1').primaryKey().autoincrement().default(0), + col98: double('col1').primaryKey().autoincrement().default(0), + col99: double('col1').primaryKey().autoincrement().default(0), + col100: double('col1').primaryKey().autoincrement().default(0), + col101: double('col1').primaryKey().autoincrement().default(0), + col102: double('col1').primaryKey().autoincrement().default(0), + col103: double('col1').primaryKey().autoincrement().default(0), + col104: double('col1').primaryKey().autoincrement().default(0), + col105: double('col1').primaryKey().autoincrement().default(0), + col106: double('col1').primaryKey().autoincrement().default(0), + col108: double('col1').primaryKey().autoincrement().default(0), + col109: double('col1').primaryKey().autoincrement().default(0), + col110: double('col11').primaryKey().autoincrement().default(0), + col111: double('col11').primaryKey().autoincrement().default(0), + col112: double('col11').primaryKey().autoincrement().default(0), + col113: double('col11').primaryKey().autoincrement().default(0), + col114: double('col11').primaryKey().autoincrement().default(0), + col115: double('col11').primaryKey().autoincrement().default(0), + col116: double('col11').primaryKey().autoincrement().default(0), + col118: double('col11').primaryKey().autoincrement().default(0), + col119: double('col11').primaryKey().autoincrement().default(0), + col120: double('col11').primaryKey().autoincrement().default(0), + col121: double('col11').primaryKey().autoincrement().default(0), + col122: double('col11').primaryKey().autoincrement().default(0), + col123: double('col11').primaryKey().autoincrement().default(0), + col124: double('col11').primaryKey().autoincrement().default(0), + col125: double('col11').primaryKey().autoincrement().default(0), + col126: double('col11').primaryKey().autoincrement().default(0), + col128: double('col11').primaryKey().autoincrement().default(0), + col129: double('col11').primaryKey().autoincrement().default(0), + col130: double('col11').primaryKey().autoincrement().default(0), + col131: double('col11').primaryKey().autoincrement().default(0), + col132: double('col11').primaryKey().autoincrement().default(0), + col133: double('col11').primaryKey().autoincrement().default(0), + col134: double('col11').primaryKey().autoincrement().default(0), + col135: 
double('col11').primaryKey().autoincrement().default(0), + col136: double('col11').primaryKey().autoincrement().default(0), + col138: double('col11').primaryKey().autoincrement().default(0), + col139: double('col11').primaryKey().autoincrement().default(0), + col140: double('col11').primaryKey().autoincrement().default(0), + col141: double('col11').primaryKey().autoincrement().default(0), + col142: double('col11').primaryKey().autoincrement().default(0), + col143: double('col11').primaryKey().autoincrement().default(0), + col144: double('col11').primaryKey().autoincrement().default(0), + col145: double('col11').primaryKey().autoincrement().default(0), + col146: double('col11').primaryKey().autoincrement().default(0), + col148: double('col11').primaryKey().autoincrement().default(0), + col149: double('col11').primaryKey().autoincrement().default(0), + col150: double('col11').primaryKey().autoincrement().default(0), + col151: double('col11').primaryKey().autoincrement().default(0), + col152: double('col11').primaryKey().autoincrement().default(0), + col153: double('col11').primaryKey().autoincrement().default(0), + col154: double('col11').primaryKey().autoincrement().default(0), + col155: double('col11').primaryKey().autoincrement().default(0), + col156: double('col11').primaryKey().autoincrement().default(0), + col158: double('col11').primaryKey().autoincrement().default(0), + col159: double('col11').primaryKey().autoincrement().default(0), + col160: double('col11').primaryKey().autoincrement().default(0), + col161: double('col11').primaryKey().autoincrement().default(0), + col162: double('col11').primaryKey().autoincrement().default(0), + col163: double('col11').primaryKey().autoincrement().default(0), + col164: double('col11').primaryKey().autoincrement().default(0), + col165: double('col11').primaryKey().autoincrement().default(0), + col166: double('col11').primaryKey().autoincrement().default(0), + col168: double('col11').primaryKey().autoincrement().default(0), + col169: double('col11').primaryKey().autoincrement().default(0), + col170: double('col11').primaryKey().autoincrement().default(0), + col171: double('col11').primaryKey().autoincrement().default(0), + col172: double('col11').primaryKey().autoincrement().default(0), + col173: double('col11').primaryKey().autoincrement().default(0), + col174: double('col11').primaryKey().autoincrement().default(0), + col175: double('col11').primaryKey().autoincrement().default(0), + col176: double('col11').primaryKey().autoincrement().default(0), + col178: double('col11').primaryKey().autoincrement().default(0), + col179: double('col11').primaryKey().autoincrement().default(0), + col180: double('col11').primaryKey().autoincrement().default(0), + col181: double('col11').primaryKey().autoincrement().default(0), + col182: double('col11').primaryKey().autoincrement().default(0), + col183: double('col11').primaryKey().autoincrement().default(0), + col184: double('col11').primaryKey().autoincrement().default(0), + col185: double('col11').primaryKey().autoincrement().default(0), + col186: double('col11').primaryKey().autoincrement().default(0), + col188: double('col11').primaryKey().autoincrement().default(0), + col189: double('col11').primaryKey().autoincrement().default(0), + col190: double('col11').primaryKey().autoincrement().default(0), + col191: double('col11').primaryKey().autoincrement().default(0), + col192: double('col11').primaryKey().autoincrement().default(0), + col193: double('col11').primaryKey().autoincrement().default(0), + col194: 
double('col11').primaryKey().autoincrement().default(0), + col195: double('col11').primaryKey().autoincrement().default(0), + col196: double('col11').primaryKey().autoincrement().default(0), + col198: double('col11').primaryKey().autoincrement().default(0), + col199: double('col11').primaryKey().autoincrement().default(0), + col200: double('col2').primaryKey().autoincrement().default(0), + col201: double('col2').primaryKey().autoincrement().default(0), + col202: double('col2').primaryKey().autoincrement().default(0), + col203: double('col2').primaryKey().autoincrement().default(0), + col204: double('col2').primaryKey().autoincrement().default(0), + col205: double('col2').primaryKey().autoincrement().default(0), + col206: double('col2').primaryKey().autoincrement().default(0), + col208: double('col2').primaryKey().autoincrement().default(0), + col209: double('col2').primaryKey().autoincrement().default(0), + col210: double('col21').primaryKey().autoincrement().default(0), + col211: double('col21').primaryKey().autoincrement().default(0), + col212: double('col21').primaryKey().autoincrement().default(0), + col213: double('col21').primaryKey().autoincrement().default(0), + col214: double('col21').primaryKey().autoincrement().default(0), + col215: double('col21').primaryKey().autoincrement().default(0), + col216: double('col21').primaryKey().autoincrement().default(0), + col218: double('col21').primaryKey().autoincrement().default(0), + col219: double('col21').primaryKey().autoincrement().default(0), + col220: double('col21').primaryKey().autoincrement().default(0), + col221: double('col21').primaryKey().autoincrement().default(0), + col222: double('col21').primaryKey().autoincrement().default(0), + col223: double('col21').primaryKey().autoincrement().default(0), + col224: double('col21').primaryKey().autoincrement().default(0), + col225: double('col21').primaryKey().autoincrement().default(0), + col226: double('col21').primaryKey().autoincrement().default(0), + col228: double('col21').primaryKey().autoincrement().default(0), + col229: double('col21').primaryKey().autoincrement().default(0), + col230: double('col21').primaryKey().autoincrement().default(0), + col231: double('col21').primaryKey().autoincrement().default(0), + col232: double('col21').primaryKey().autoincrement().default(0), + col233: double('col21').primaryKey().autoincrement().default(0), + col234: double('col21').primaryKey().autoincrement().default(0), + col235: double('col21').primaryKey().autoincrement().default(0), + col236: double('col21').primaryKey().autoincrement().default(0), + col238: double('col21').primaryKey().autoincrement().default(0), + col239: double('col21').primaryKey().autoincrement().default(0), + col240: double('col21').primaryKey().autoincrement().default(0), + col241: double('col21').primaryKey().autoincrement().default(0), + col242: double('col21').primaryKey().autoincrement().default(0), + col243: double('col21').primaryKey().autoincrement().default(0), + col244: double('col21').primaryKey().autoincrement().default(0), + col245: double('col21').primaryKey().autoincrement().default(0), + col246: double('col21').primaryKey().autoincrement().default(0), + col248: double('col21').primaryKey().autoincrement().default(0), + col249: double('col21').primaryKey().autoincrement().default(0), + col250: double('col21').primaryKey().autoincrement().default(0), + col251: double('col21').primaryKey().autoincrement().default(0), + col252: double('col21').primaryKey().autoincrement().default(0), + col253: 
double('col21').primaryKey().autoincrement().default(0), + col254: double('col21').primaryKey().autoincrement().default(0), + col255: double('col21').primaryKey().autoincrement().default(0), + col256: double('col21').primaryKey().autoincrement().default(0), + col258: double('col21').primaryKey().autoincrement().default(0), + col259: double('col21').primaryKey().autoincrement().default(0), + col260: double('col21').primaryKey().autoincrement().default(0), + col261: double('col21').primaryKey().autoincrement().default(0), + col262: double('col21').primaryKey().autoincrement().default(0), + col263: double('col21').primaryKey().autoincrement().default(0), + col264: double('col21').primaryKey().autoincrement().default(0), + col265: double('col21').primaryKey().autoincrement().default(0), + col266: double('col21').primaryKey().autoincrement().default(0), + col268: double('col21').primaryKey().autoincrement().default(0), + col269: double('col21').primaryKey().autoincrement().default(0), + col270: double('col21').primaryKey().autoincrement().default(0), + col271: double('col21').primaryKey().autoincrement().default(0), + col272: double('col21').primaryKey().autoincrement().default(0), + col273: double('col21').primaryKey().autoincrement().default(0), + col274: double('col21').primaryKey().autoincrement().default(0), + col275: double('col21').primaryKey().autoincrement().default(0), + col276: double('col21').primaryKey().autoincrement().default(0), + col278: double('col21').primaryKey().autoincrement().default(0), + col279: double('col21').primaryKey().autoincrement().default(0), + col280: double('col21').primaryKey().autoincrement().default(0), + col281: double('col21').primaryKey().autoincrement().default(0), + col282: double('col21').primaryKey().autoincrement().default(0), + col283: double('col21').primaryKey().autoincrement().default(0), + col284: double('col21').primaryKey().autoincrement().default(0), + col285: double('col21').primaryKey().autoincrement().default(0), + col286: double('col21').primaryKey().autoincrement().default(0), + col288: double('col21').primaryKey().autoincrement().default(0), + col289: double('col21').primaryKey().autoincrement().default(0), + col290: double('col21').primaryKey().autoincrement().default(0), + col291: double('col21').primaryKey().autoincrement().default(0), + col292: double('col21').primaryKey().autoincrement().default(0), + col293: double('col21').primaryKey().autoincrement().default(0), + col294: double('col21').primaryKey().autoincrement().default(0), + col295: double('col21').primaryKey().autoincrement().default(0), + col296: double('col21').primaryKey().autoincrement().default(0), + col298: double('col21').primaryKey().autoincrement().default(0), + col299: double('col21').primaryKey().autoincrement().default(0), + col300: double('col3').primaryKey().autoincrement().default(0), + col301: double('col3').primaryKey().autoincrement().default(0), + col302: double('col3').primaryKey().autoincrement().default(0), + col303: double('col3').primaryKey().autoincrement().default(0), + col304: double('col3').primaryKey().autoincrement().default(0), + col305: double('col3').primaryKey().autoincrement().default(0), + col306: double('col3').primaryKey().autoincrement().default(0), + col308: double('col3').primaryKey().autoincrement().default(0), + col309: double('col3').primaryKey().autoincrement().default(0), + col310: double('col31').primaryKey().autoincrement().default(0), + col311: double('col31').primaryKey().autoincrement().default(0), + col312: 
double('col31').primaryKey().autoincrement().default(0), + col313: double('col31').primaryKey().autoincrement().default(0), + col314: double('col31').primaryKey().autoincrement().default(0), + col315: double('col31').primaryKey().autoincrement().default(0), + col316: double('col31').primaryKey().autoincrement().default(0), + col318: double('col31').primaryKey().autoincrement().default(0), + col319: double('col31').primaryKey().autoincrement().default(0), + col320: double('col31').primaryKey().autoincrement().default(0), + col321: double('col31').primaryKey().autoincrement().default(0), + col322: double('col31').primaryKey().autoincrement().default(0), + col323: double('col31').primaryKey().autoincrement().default(0), + col324: double('col31').primaryKey().autoincrement().default(0), + col325: double('col31').primaryKey().autoincrement().default(0), + col326: double('col31').primaryKey().autoincrement().default(0), + col328: double('col31').primaryKey().autoincrement().default(0), + col329: double('col31').primaryKey().autoincrement().default(0), + col330: double('col31').primaryKey().autoincrement().default(0), + col331: double('col31').primaryKey().autoincrement().default(0), + col332: double('col31').primaryKey().autoincrement().default(0), + col333: double('col31').primaryKey().autoincrement().default(0), + col334: double('col31').primaryKey().autoincrement().default(0), + col335: double('col31').primaryKey().autoincrement().default(0), + col336: double('col31').primaryKey().autoincrement().default(0), + col338: double('col31').primaryKey().autoincrement().default(0), + col339: double('col31').primaryKey().autoincrement().default(0), + col340: double('col31').primaryKey().autoincrement().default(0), + col341: double('col31').primaryKey().autoincrement().default(0), + col342: double('col31').primaryKey().autoincrement().default(0), + col343: double('col31').primaryKey().autoincrement().default(0), + col344: double('col31').primaryKey().autoincrement().default(0), + col345: double('col31').primaryKey().autoincrement().default(0), + col346: double('col31').primaryKey().autoincrement().default(0), + col348: double('col31').primaryKey().autoincrement().default(0), + col349: double('col31').primaryKey().autoincrement().default(0), + col350: double('col31').primaryKey().autoincrement().default(0), + col351: double('col31').primaryKey().autoincrement().default(0), + col352: double('col31').primaryKey().autoincrement().default(0), + col353: double('col31').primaryKey().autoincrement().default(0), + col354: double('col31').primaryKey().autoincrement().default(0), + col355: double('col31').primaryKey().autoincrement().default(0), + col356: double('col31').primaryKey().autoincrement().default(0), + col358: double('col31').primaryKey().autoincrement().default(0), + col359: double('col31').primaryKey().autoincrement().default(0), + col360: double('col31').primaryKey().autoincrement().default(0), + col361: double('col31').primaryKey().autoincrement().default(0), + col362: double('col31').primaryKey().autoincrement().default(0), + col363: double('col31').primaryKey().autoincrement().default(0), + col364: double('col31').primaryKey().autoincrement().default(0), + col365: double('col31').primaryKey().autoincrement().default(0), + col366: double('col31').primaryKey().autoincrement().default(0), + col368: double('col31').primaryKey().autoincrement().default(0), + col369: double('col31').primaryKey().autoincrement().default(0), + col370: double('col31').primaryKey().autoincrement().default(0), + col371: 
double('col31').primaryKey().autoincrement().default(0), + col372: double('col31').primaryKey().autoincrement().default(0), + col373: double('col31').primaryKey().autoincrement().default(0), + col374: double('col31').primaryKey().autoincrement().default(0), + col375: double('col31').primaryKey().autoincrement().default(0), + col376: double('col31').primaryKey().autoincrement().default(0), + col378: double('col31').primaryKey().autoincrement().default(0), + col379: double('col31').primaryKey().autoincrement().default(0), + col380: double('col31').primaryKey().autoincrement().default(0), + col381: double('col31').primaryKey().autoincrement().default(0), + col382: double('col31').primaryKey().autoincrement().default(0), + col383: double('col31').primaryKey().autoincrement().default(0), + col384: double('col31').primaryKey().autoincrement().default(0), + col385: double('col31').primaryKey().autoincrement().default(0), + col386: double('col31').primaryKey().autoincrement().default(0), + col388: double('col31').primaryKey().autoincrement().default(0), + col389: double('col31').primaryKey().autoincrement().default(0), + col390: double('col31').primaryKey().autoincrement().default(0), + col391: double('col31').primaryKey().autoincrement().default(0), + col392: double('col31').primaryKey().autoincrement().default(0), + col393: double('col31').primaryKey().autoincrement().default(0), + col394: double('col31').primaryKey().autoincrement().default(0), + col395: double('col31').primaryKey().autoincrement().default(0), + col396: double('col31').primaryKey().autoincrement().default(0), + col398: double('col31').primaryKey().autoincrement().default(0), + col399: double('col31').primaryKey().autoincrement().default(0), + col400: double('col4').primaryKey().autoincrement().default(0), + col401: double('col4').primaryKey().autoincrement().default(0), + col402: double('col4').primaryKey().autoincrement().default(0), + col403: double('col4').primaryKey().autoincrement().default(0), + col404: double('col4').primaryKey().autoincrement().default(0), + col405: double('col4').primaryKey().autoincrement().default(0), + col406: double('col4').primaryKey().autoincrement().default(0), + col408: double('col4').primaryKey().autoincrement().default(0), + col409: double('col4').primaryKey().autoincrement().default(0), + col410: double('col41').primaryKey().autoincrement().default(0), + col411: double('col41').primaryKey().autoincrement().default(0), + col412: double('col41').primaryKey().autoincrement().default(0), + col413: double('col41').primaryKey().autoincrement().default(0), + col414: double('col41').primaryKey().autoincrement().default(0), + col415: double('col41').primaryKey().autoincrement().default(0), + col416: double('col41').primaryKey().autoincrement().default(0), + col418: double('col41').primaryKey().autoincrement().default(0), + col419: double('col41').primaryKey().autoincrement().default(0), + col420: double('col41').primaryKey().autoincrement().default(0), + col421: double('col41').primaryKey().autoincrement().default(0), + col422: double('col41').primaryKey().autoincrement().default(0), + col423: double('col41').primaryKey().autoincrement().default(0), + col424: double('col41').primaryKey().autoincrement().default(0), + col425: double('col41').primaryKey().autoincrement().default(0), + col426: double('col41').primaryKey().autoincrement().default(0), + col428: double('col41').primaryKey().autoincrement().default(0), + col429: double('col41').primaryKey().autoincrement().default(0), + col430: 
double('col41').primaryKey().autoincrement().default(0), + col431: double('col41').primaryKey().autoincrement().default(0), + col432: double('col41').primaryKey().autoincrement().default(0), + col433: double('col41').primaryKey().autoincrement().default(0), + col434: double('col41').primaryKey().autoincrement().default(0), + col435: double('col41').primaryKey().autoincrement().default(0), + col436: double('col41').primaryKey().autoincrement().default(0), + col438: double('col41').primaryKey().autoincrement().default(0), + col439: double('col41').primaryKey().autoincrement().default(0), + col440: double('col41').primaryKey().autoincrement().default(0), + col441: double('col41').primaryKey().autoincrement().default(0), + col442: double('col41').primaryKey().autoincrement().default(0), + col443: double('col41').primaryKey().autoincrement().default(0), + col444: double('col41').primaryKey().autoincrement().default(0), + col445: double('col41').primaryKey().autoincrement().default(0), + col446: double('col41').primaryKey().autoincrement().default(0), + col448: double('col41').primaryKey().autoincrement().default(0), + col449: double('col41').primaryKey().autoincrement().default(0), + col450: double('col41').primaryKey().autoincrement().default(0), + col451: double('col41').primaryKey().autoincrement().default(0), + col452: double('col41').primaryKey().autoincrement().default(0), + col453: double('col41').primaryKey().autoincrement().default(0), + col454: double('col41').primaryKey().autoincrement().default(0), + col455: double('col41').primaryKey().autoincrement().default(0), + col456: double('col41').primaryKey().autoincrement().default(0), + col458: double('col41').primaryKey().autoincrement().default(0), + col459: double('col41').primaryKey().autoincrement().default(0), + col460: double('col41').primaryKey().autoincrement().default(0), + col461: double('col41').primaryKey().autoincrement().default(0), + col462: double('col41').primaryKey().autoincrement().default(0), + col463: double('col41').primaryKey().autoincrement().default(0), + col464: double('col41').primaryKey().autoincrement().default(0), + col465: double('col41').primaryKey().autoincrement().default(0), + col466: double('col41').primaryKey().autoincrement().default(0), + col468: double('col41').primaryKey().autoincrement().default(0), + col469: double('col41').primaryKey().autoincrement().default(0), + col470: double('col41').primaryKey().autoincrement().default(0), + col471: double('col41').primaryKey().autoincrement().default(0), + col472: double('col41').primaryKey().autoincrement().default(0), + col473: double('col41').primaryKey().autoincrement().default(0), + col474: double('col41').primaryKey().autoincrement().default(0), + col475: double('col41').primaryKey().autoincrement().default(0), + col476: double('col41').primaryKey().autoincrement().default(0), + col478: double('col41').primaryKey().autoincrement().default(0), + col479: double('col41').primaryKey().autoincrement().default(0), + col480: double('col41').primaryKey().autoincrement().default(0), + col481: double('col41').primaryKey().autoincrement().default(0), + col482: double('col41').primaryKey().autoincrement().default(0), + col483: double('col41').primaryKey().autoincrement().default(0), + col484: double('col41').primaryKey().autoincrement().default(0), + col485: double('col41').primaryKey().autoincrement().default(0), + col486: double('col41').primaryKey().autoincrement().default(0), + col488: double('col41').primaryKey().autoincrement().default(0), + col489: 
double('col41').primaryKey().autoincrement().default(0), + col490: double('col41').primaryKey().autoincrement().default(0), + col491: double('col41').primaryKey().autoincrement().default(0), + col492: double('col41').primaryKey().autoincrement().default(0), + col493: double('col41').primaryKey().autoincrement().default(0), + col494: double('col41').primaryKey().autoincrement().default(0), + col495: double('col41').primaryKey().autoincrement().default(0), + col496: double('col41').primaryKey().autoincrement().default(0), + col498: double('col41').primaryKey().autoincrement().default(0), + col499: double('col41').primaryKey().autoincrement().default(0), + col500: double('col5').primaryKey().autoincrement().default(0), + col501: double('col5').primaryKey().autoincrement().default(0), + col502: double('col5').primaryKey().autoincrement().default(0), + col503: double('col5').primaryKey().autoincrement().default(0), + col504: double('col5').primaryKey().autoincrement().default(0), + col505: double('col5').primaryKey().autoincrement().default(0), + col506: double('col5').primaryKey().autoincrement().default(0), + col508: double('col5').primaryKey().autoincrement().default(0), + col509: double('col5').primaryKey().autoincrement().default(0), + col510: double('col51').primaryKey().autoincrement().default(0), + col511: double('col51').primaryKey().autoincrement().default(0), + col512: double('col51').primaryKey().autoincrement().default(0), + col513: double('col51').primaryKey().autoincrement().default(0), + col514: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col515: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col516: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col518: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col519: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col520: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col521: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col522: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col523: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col524: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col525: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col526: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col528: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col529: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col530: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col531: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col532: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col533: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col534: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col535: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col536: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col538: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col539: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col540: bigint('col51', { 
mode: 'number' }).primaryKey().autoincrement().default(0), + col541: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col542: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col543: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col544: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col545: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col546: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col548: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col549: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col550: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col551: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col552: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col553: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col554: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col555: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col556: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col558: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col559: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col560: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col561: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col562: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col563: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col564: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col565: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col566: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col568: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col569: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col570: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col571: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col572: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col573: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col574: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col575: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col576: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col578: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col579: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col580: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col581: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col582: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col583: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col584: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col585: 
bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col586: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col588: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col589: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col590: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col591: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col592: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col593: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col594: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col595: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col596: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col598: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col599: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col600: bigint('col6', { mode: 'number' }).primaryKey().autoincrement().default(0), + col601: double('col6').primaryKey().autoincrement().default(0), + col602: double('col6').primaryKey().autoincrement().default(0), + col603: double('col6').primaryKey().autoincrement().default(0), + col604: double('col6').primaryKey().autoincrement().default(0), + col605: double('col6').primaryKey().autoincrement().default(0), + col606: double('col6').primaryKey().autoincrement().default(0), + col608: double('col6').primaryKey().autoincrement().default(0), + col609: double('col6').primaryKey().autoincrement().default(0), + col610: double('col61').primaryKey().autoincrement().default(0), + col611: double('col61').primaryKey().autoincrement().default(0), + col612: double('col61').primaryKey().autoincrement().default(0), + col613: double('col61').primaryKey().autoincrement().default(0), + col614: double('col61').primaryKey().autoincrement().default(0), + col615: double('col61').primaryKey().autoincrement().default(0), + col616: double('col61').primaryKey().autoincrement().default(0), + col618: double('col61').primaryKey().autoincrement().default(0), + col619: double('col61').primaryKey().autoincrement().default(0), + col620: double('col61').primaryKey().autoincrement().default(0), + col621: double('col61').primaryKey().autoincrement().default(0), + col622: double('col61').primaryKey().autoincrement().default(0), + col623: double('col61').primaryKey().autoincrement().default(0), + col624: double('col61').primaryKey().autoincrement().default(0), + col625: double('col61').primaryKey().autoincrement().default(0), + col626: double('col61').primaryKey().autoincrement().default(0), + col628: double('col61').primaryKey().autoincrement().default(0), + col629: double('col61').primaryKey().autoincrement().default(0), + col630: double('col61').primaryKey().autoincrement().default(0), + col631: double('col61').primaryKey().autoincrement().default(0), + col632: double('col61').primaryKey().autoincrement().default(0), + col633: double('col61').primaryKey().autoincrement().default(0), + col634: double('col61').primaryKey().autoincrement().default(0), + col635: double('col61').primaryKey().autoincrement().default(0), + col636: double('col61').primaryKey().autoincrement().default(0), + col638: double('col61').primaryKey().autoincrement().default(0), + col639: double('col61').primaryKey().autoincrement().default(0), + col640: 
double('col61').primaryKey().autoincrement().default(0), + col641: double('col61').primaryKey().autoincrement().default(0), + col642: double('col61').primaryKey().autoincrement().default(0), + col643: double('col61').primaryKey().autoincrement().default(0), + col644: double('col61').primaryKey().autoincrement().default(0), + col645: double('col61').primaryKey().autoincrement().default(0), + col646: double('col61').primaryKey().autoincrement().default(0), + col648: double('col61').primaryKey().autoincrement().default(0), + col649: double('col61').primaryKey().autoincrement().default(0), + col650: double('col61').primaryKey().autoincrement().default(0), + col651: double('col61').primaryKey().autoincrement().default(0), + col652: double('col61').primaryKey().autoincrement().default(0), + col653: double('col61').primaryKey().autoincrement().default(0), + col654: double('col61').primaryKey().autoincrement().default(0), + col655: double('col61').primaryKey().autoincrement().default(0), + col656: double('col61').primaryKey().autoincrement().default(0), + col658: double('col61').primaryKey().autoincrement().default(0), + col659: double('col61').primaryKey().autoincrement().default(0), + col660: double('col61').primaryKey().autoincrement().default(0), + col661: double('col61').primaryKey().autoincrement().default(0), + col662: double('col61').primaryKey().autoincrement().default(0), + col663: double('col61').primaryKey().autoincrement().default(0), + col664: double('col61').primaryKey().autoincrement().default(0), + col665: double('col61').primaryKey().autoincrement().default(0), + col666: double('col61').primaryKey().autoincrement().default(0), + col668: double('col61').primaryKey().autoincrement().default(0), + col669: double('col61').primaryKey().autoincrement().default(0), + col670: double('col61').primaryKey().autoincrement().default(0), + col671: double('col61').primaryKey().autoincrement().default(0), + col672: double('col61').primaryKey().autoincrement().default(0), + col673: double('col61').primaryKey().autoincrement().default(0), + col674: double('col61').primaryKey().autoincrement().default(0), + col675: double('col61').primaryKey().autoincrement().default(0), + col676: double('col61').primaryKey().autoincrement().default(0), + col678: double('col61').primaryKey().autoincrement().default(0), + col679: double('col61').primaryKey().autoincrement().default(0), + col680: double('col61').primaryKey().autoincrement().default(0), + col681: double('col61').primaryKey().autoincrement().default(0), + col682: double('col61').primaryKey().autoincrement().default(0), + col683: double('col61').primaryKey().autoincrement().default(0), + col684: double('col61').primaryKey().autoincrement().default(0), + col685: double('col61').primaryKey().autoincrement().default(0), + col686: double('col61').primaryKey().autoincrement().default(0), + col688: double('col61').primaryKey().autoincrement().default(0), + col689: double('col61').primaryKey().autoincrement().default(0), + col690: double('col61').primaryKey().autoincrement().default(0), + col691: double('col61').primaryKey().autoincrement().default(0), + col692: double('col61').primaryKey().autoincrement().default(0), + col693: double('col61').primaryKey().autoincrement().default(0), + col694: double('col61').primaryKey().autoincrement().default(0), + col695: double('col61').primaryKey().autoincrement().default(0), + col696: double('col61').primaryKey().autoincrement().default(0), + col698: double('col61').primaryKey().autoincrement().default(0), + col699: 
double('col61').primaryKey().autoincrement().default(0), + col700: double('col7').primaryKey().autoincrement().default(0), + col701: double('col7').primaryKey().autoincrement().default(0), + col702: double('col7').primaryKey().autoincrement().default(0), + col703: double('col7').primaryKey().autoincrement().default(0), + col704: double('col7').primaryKey().autoincrement().default(0), + col705: double('col7').primaryKey().autoincrement().default(0), + col706: double('col7').primaryKey().autoincrement().default(0), + col708: double('col7').primaryKey().autoincrement().default(0), + col709: double('col7').primaryKey().autoincrement().default(0), + col710: double('col71').primaryKey().autoincrement().default(0), + col711: double('col71').primaryKey().autoincrement().default(0), + col712: double('col71').primaryKey().autoincrement().default(0), + col713: double('col71').primaryKey().autoincrement().default(0), + col714: double('col71').primaryKey().autoincrement().default(0), + col715: double('col71').primaryKey().autoincrement().default(0), + col716: double('col71').primaryKey().autoincrement().default(0), + col718: double('col71').primaryKey().autoincrement().default(0), + col719: double('col71').primaryKey().autoincrement().default(0), + col720: double('col71').primaryKey().autoincrement().default(0), + col721: double('col71').primaryKey().autoincrement().default(0), + col722: double('col71').primaryKey().autoincrement().default(0), + col723: double('col71').primaryKey().autoincrement().default(0), + col724: double('col71').primaryKey().autoincrement().default(0), + col725: double('col71').primaryKey().autoincrement().default(0), + col726: double('col71').primaryKey().autoincrement().default(0), + col728: double('col71').primaryKey().autoincrement().default(0), + col729: double('col71').primaryKey().autoincrement().default(0), + col730: double('col71').primaryKey().autoincrement().default(0), + col731: double('col71').primaryKey().autoincrement().default(0), + col732: double('col71').primaryKey().autoincrement().default(0), + col733: double('col71').primaryKey().autoincrement().default(0), + col734: double('col71').primaryKey().autoincrement().default(0), + col735: double('col71').primaryKey().autoincrement().default(0), + col736: double('col71').primaryKey().autoincrement().default(0), + col738: double('col71').primaryKey().autoincrement().default(0), + col739: double('col71').primaryKey().autoincrement().default(0), + col740: double('col71').primaryKey().autoincrement().default(0), + col741: double('col71').primaryKey().autoincrement().default(0), + col742: double('col71').primaryKey().autoincrement().default(0), + col743: double('col71').primaryKey().autoincrement().default(0), + col744: double('col71').primaryKey().autoincrement().default(0), + col745: double('col71').primaryKey().autoincrement().default(0), + col746: double('col71').primaryKey().autoincrement().default(0), + col748: double('col71').primaryKey().autoincrement().default(0), + col749: double('col71').primaryKey().autoincrement().default(0), + col750: double('col71').primaryKey().autoincrement().default(0), + col751: double('col71').primaryKey().autoincrement().default(0), + col752: double('col71').primaryKey().autoincrement().default(0), + col753: double('col71').primaryKey().autoincrement().default(0), + col754: double('col71').primaryKey().autoincrement().default(0), + col755: double('col71').primaryKey().autoincrement().default(0), + col756: double('col71').primaryKey().autoincrement().default(0), + col758: 
double('col71').primaryKey().autoincrement().default(0), + col759: double('col71').primaryKey().autoincrement().default(0), + col760: double('col71').primaryKey().autoincrement().default(0), + col761: double('col71').primaryKey().autoincrement().default(0), + col762: double('col71').primaryKey().autoincrement().default(0), + col763: double('col71').primaryKey().autoincrement().default(0), + col764: double('col71').primaryKey().autoincrement().default(0), + col765: double('col71').primaryKey().autoincrement().default(0), + col766: double('col71').primaryKey().autoincrement().default(0), + col768: double('col71').primaryKey().autoincrement().default(0), + col769: double('col71').primaryKey().autoincrement().default(0), + col770: double('col71').primaryKey().autoincrement().default(0), + col771: double('col71').primaryKey().autoincrement().default(0), + col772: double('col71').primaryKey().autoincrement().default(0), + col773: double('col71').primaryKey().autoincrement().default(0), + col774: double('col71').primaryKey().autoincrement().default(0), + col775: double('col71').primaryKey().autoincrement().default(0), + col776: double('col71').primaryKey().autoincrement().default(0), + col778: double('col71').primaryKey().autoincrement().default(0), + col779: double('col71').primaryKey().autoincrement().default(0), + col780: double('col71').primaryKey().autoincrement().default(0), + col781: double('col71').primaryKey().autoincrement().default(0), + col782: double('col71').primaryKey().autoincrement().default(0), + col783: double('col71').primaryKey().autoincrement().default(0), + col784: double('col71').primaryKey().autoincrement().default(0), + col785: double('col71').primaryKey().autoincrement().default(0), + col786: double('col71').primaryKey().autoincrement().default(0), + col788: double('col71').primaryKey().autoincrement().default(0), + col789: double('col71').primaryKey().autoincrement().default(0), + col790: double('col71').primaryKey().autoincrement().default(0), + col791: double('col71').primaryKey().autoincrement().default(0), + col792: double('col71').primaryKey().autoincrement().default(0), + col793: double('col71').primaryKey().autoincrement().default(0), + col794: double('col71').primaryKey().autoincrement().default(0), + col795: double('col71').primaryKey().autoincrement().default(0), + col796: double('col71').primaryKey().autoincrement().default(0), + col798: double('col71').primaryKey().autoincrement().default(0), + col799: double('col71').primaryKey().autoincrement().default(0), + col800: double('col8').primaryKey().autoincrement().default(0), + col801: double('col8').primaryKey().autoincrement().default(0), + col802: double('col8').primaryKey().autoincrement().default(0), + col803: double('col8').primaryKey().autoincrement().default(0), + col804: double('col8').primaryKey().autoincrement().default(0), + col805: double('col8').primaryKey().autoincrement().default(0), + col806: double('col8').primaryKey().autoincrement().default(0), + col808: double('col8').primaryKey().autoincrement().default(0), + col809: double('col8').primaryKey().autoincrement().default(0), + col810: double('col81').primaryKey().autoincrement().default(0), + col811: double('col81').primaryKey().autoincrement().default(0), + col812: double('col81').primaryKey().autoincrement().default(0), + col813: double('col81').primaryKey().autoincrement().default(0), + col814: double('col81').primaryKey().autoincrement().default(0), + col815: double('col81').primaryKey().autoincrement().default(0), + col816: 
double('col81').primaryKey().autoincrement().default(0), + col818: double('col81').primaryKey().autoincrement().default(0), + col819: double('col81').primaryKey().autoincrement().default(0), + col820: double('col81').primaryKey().autoincrement().default(0), + col821: double('col81').primaryKey().autoincrement().default(0), + col822: double('col81').primaryKey().autoincrement().default(0), + col823: double('col81').primaryKey().autoincrement().default(0), + col824: double('col81').primaryKey().autoincrement().default(0), + col825: double('col81').primaryKey().autoincrement().default(0), + col826: double('col81').primaryKey().autoincrement().default(0), + col828: double('col81').primaryKey().autoincrement().default(0), + col829: double('col81').primaryKey().autoincrement().default(0), + col830: double('col81').primaryKey().autoincrement().default(0), + col831: double('col81').primaryKey().autoincrement().default(0), + col832: double('col81').primaryKey().autoincrement().default(0), + col833: double('col81').primaryKey().autoincrement().default(0), + col834: double('col81').primaryKey().autoincrement().default(0), + col835: double('col81').primaryKey().autoincrement().default(0), + col836: double('col81').primaryKey().autoincrement().default(0), + col838: double('col81').primaryKey().autoincrement().default(0), + col839: double('col81').primaryKey().autoincrement().default(0), + col840: double('col81').primaryKey().autoincrement().default(0), + col841: double('col81').primaryKey().autoincrement().default(0), + col842: double('col81').primaryKey().autoincrement().default(0), + col843: double('col81').primaryKey().autoincrement().default(0), + col844: double('col81').primaryKey().autoincrement().default(0), + col845: double('col81').primaryKey().autoincrement().default(0), + col846: double('col81').primaryKey().autoincrement().default(0), + col848: double('col81').primaryKey().autoincrement().default(0), + col849: double('col81').primaryKey().autoincrement().default(0), + col850: double('col81').primaryKey().autoincrement().default(0), + col851: double('col81').primaryKey().autoincrement().default(0), + col852: double('col81').primaryKey().autoincrement().default(0), + col853: double('col81').primaryKey().autoincrement().default(0), + col854: double('col81').primaryKey().autoincrement().default(0), + col855: double('col81').primaryKey().autoincrement().default(0), + col856: double('col81').primaryKey().autoincrement().default(0), + col858: double('col81').primaryKey().autoincrement().default(0), + col859: double('col81').primaryKey().autoincrement().default(0), + col860: double('col81').primaryKey().autoincrement().default(0), + col861: double('col81').primaryKey().autoincrement().default(0), + col862: double('col81').primaryKey().autoincrement().default(0), + col863: double('col81').primaryKey().autoincrement().default(0), + col864: double('col81').primaryKey().autoincrement().default(0), + col865: double('col81').primaryKey().autoincrement().default(0), + col866: double('col81').primaryKey().autoincrement().default(0), + col868: double('col81').primaryKey().autoincrement().default(0), + col869: double('col81').primaryKey().autoincrement().default(0), + col870: double('col81').primaryKey().autoincrement().default(0), + col871: double('col81').primaryKey().autoincrement().default(0), + col872: double('col81').primaryKey().autoincrement().default(0), + col873: double('col81').primaryKey().autoincrement().default(0), + col874: double('col81').primaryKey().autoincrement().default(0), + col875: 
double('col81').primaryKey().autoincrement().default(0), + col876: double('col81').primaryKey().autoincrement().default(0), + col878: double('col81').primaryKey().autoincrement().default(0), + col879: double('col81').primaryKey().autoincrement().default(0), + col880: double('col81').primaryKey().autoincrement().default(0), + col881: double('col81').primaryKey().autoincrement().default(0), + col882: double('col81').primaryKey().autoincrement().default(0), + col883: double('col81').primaryKey().autoincrement().default(0), + col884: double('col81').primaryKey().autoincrement().default(0), + col885: double('col81').primaryKey().autoincrement().default(0), + col886: double('col81').primaryKey().autoincrement().default(0), + col888: double('col81').primaryKey().autoincrement().default(0), + col889: double('col81').primaryKey().autoincrement().default(0), + col890: double('col81').primaryKey().autoincrement().default(0), + col891: double('col81').primaryKey().autoincrement().default(0), + col892: double('col81').primaryKey().autoincrement().default(0), + col893: double('col81').primaryKey().autoincrement().default(0), + col894: double('col81').primaryKey().autoincrement().default(0), + col895: double('col81').primaryKey().autoincrement().default(0), + col896: double('col81').primaryKey().autoincrement().default(0), + col898: double('col81').primaryKey().autoincrement().default(0), + col899: double('col81').primaryKey().autoincrement().default(0), + col900: double('col9').primaryKey().autoincrement().default(0), + col901: double('col9').primaryKey().autoincrement().default(0), + col902: double('col9').primaryKey().autoincrement().default(0), + col903: double('col9').primaryKey().autoincrement().default(0), + col904: double('col9').primaryKey().autoincrement().default(0), + col905: double('col9').primaryKey().autoincrement().default(0), + col906: double('col9').primaryKey().autoincrement().default(0), + col908: double('col9').primaryKey().autoincrement().default(0), + col909: double('col9').primaryKey().autoincrement().default(0), + col910: double('col91').primaryKey().autoincrement().default(0), + col911: double('col91').primaryKey().autoincrement().default(0), + col912: double('col91').primaryKey().autoincrement().default(0), + col913: double('col91').primaryKey().autoincrement().default(0), + col914: double('col91').primaryKey().autoincrement().default(0), + col915: double('col91').primaryKey().autoincrement().default(0), + col916: double('col91').primaryKey().autoincrement().default(0), + col918: double('col91').primaryKey().autoincrement().default(0), + col919: double('col91').primaryKey().autoincrement().default(0), + col920: double('col91').primaryKey().autoincrement().default(0), + col921: double('col91').primaryKey().autoincrement().default(0), + col922: double('col91').primaryKey().autoincrement().default(0), + col923: double('col91').primaryKey().autoincrement().default(0), + col924: double('col91').primaryKey().autoincrement().default(0), + col925: double('col91').primaryKey().autoincrement().default(0), + col926: double('col91').primaryKey().autoincrement().default(0), + col928: double('col91').primaryKey().autoincrement().default(0), + col929: double('col91').primaryKey().autoincrement().default(0), + col930: double('col91').primaryKey().autoincrement().default(0), + col931: double('col91').primaryKey().autoincrement().default(0), + col932: double('col91').primaryKey().autoincrement().default(0), + col933: double('col91').primaryKey().autoincrement().default(0), + col934: 
double('col91').primaryKey().autoincrement().default(0), + col935: double('col91').primaryKey().autoincrement().default(0), + col936: double('col91').primaryKey().autoincrement().default(0), + col938: double('col91').primaryKey().autoincrement().default(0), + col939: double('col91').primaryKey().autoincrement().default(0), + col940: double('col91').primaryKey().autoincrement().default(0), + col941: double('col91').primaryKey().autoincrement().default(0), + col942: double('col91').primaryKey().autoincrement().default(0), + col943: double('col91').primaryKey().autoincrement().default(0), + col944: varchar('col91', { length: 200 }).primaryKey().default('0'), + col945: varchar('col91', { length: 200 }).primaryKey().default('0'), + col946: varchar('col91', { length: 200 }).primaryKey().default('0'), + col948: varchar('col91', { length: 200 }).primaryKey().default('0'), + col949: varchar('col91', { length: 200 }).primaryKey().default('0'), + col950: varchar('col91', { length: 200 }).primaryKey().default('0'), + col951: varchar('col91', { length: 200 }).primaryKey().default('0'), + col952: varchar('col91', { length: 200 }).primaryKey().default('0'), + col953: varchar('col91', { length: 200 }).primaryKey().default('0'), + col954: varchar('col91', { length: 200 }).primaryKey().default('0'), + col955: varchar('col91', { length: 200 }).primaryKey().default('0'), + col956: varchar('col91', { length: 200 }).primaryKey().default('0'), + col958: varchar('col91', { length: 200 }).primaryKey().default('0'), + col959: varchar('col91', { length: 200 }).primaryKey().default('0'), + col960: varchar('col91', { length: 200 }).primaryKey().default('0'), + col961: varchar('col91', { length: 200 }).primaryKey().default('0'), + col962: varchar('col91', { length: 200 }).primaryKey().default('0'), + col963: varchar('col91', { length: 200 }).primaryKey().default('0'), + col964: varchar('col91', { length: 200 }).primaryKey().default('0'), + col965: varchar('col91', { length: 200 }).primaryKey().default('0'), + col966: varchar('col91', { length: 200 }).primaryKey().default('0'), + col968: varchar('col91', { length: 200 }).primaryKey().default('0'), + col969: varchar('col91', { length: 200 }).primaryKey().default('0'), + col970: varchar('col91', { length: 200 }).primaryKey().default('0'), + col971: varchar('col91', { length: 200 }).primaryKey().default('0'), + col972: varchar('col91', { length: 200 }).primaryKey().default('0'), + col973: varchar('col91', { length: 200 }).primaryKey().default('0'), + col974: varchar('col91', { length: 200 }).primaryKey().default('0'), + col975: varchar('col91', { length: 200 }).primaryKey().default('0'), + col976: varchar('col91', { length: 200 }).primaryKey().default('0'), + col978: varchar('col91', { length: 200 }).primaryKey().default('0'), + col979: varchar('col91', { length: 200 }).primaryKey().default('0'), + col980: varchar('col91', { length: 200 }).primaryKey().default('0'), + col981: varchar('col91', { length: 200 }).primaryKey().default('0'), + col982: varchar('col91', { length: 200 }).primaryKey().default('0'), + col983: varchar('col91', { length: 200 }).primaryKey().default('0'), + col984: varchar('col91', { length: 200 }).primaryKey().default('0'), + col985: varchar('col91', { length: 200 }).primaryKey().default('0'), + col986: varchar('col91', { length: 200 }).primaryKey().default('0'), + col988: varchar('col91', { length: 200 }).primaryKey().default('0'), + col989: varchar('col91', { length: 200 }).primaryKey().default('0'), + col990: varchar('col91', { length: 200 
}).primaryKey().default('0'), + col991: varchar('col91', { length: 200 }).primaryKey().default('0'), + col992: varchar('col91', { length: 200 }).primaryKey().default('0'), + col993: varchar('col91', { length: 200 }).primaryKey().default('0'), + col994: varchar('col91', { length: 200 }).primaryKey().default('0'), + col995: varchar('col91', { length: 200 }).primaryKey().default('0'), + col996: varchar('col91', { length: 200 }).primaryKey().default('0'), + col998: varchar('col91', { length: 200 }).primaryKey().default('0'), + col999: varchar('col91', { length: 200 }).primaryKey().default('0'), +}); diff --git a/drizzle-orm/type-tests/singlestore/count.ts b/drizzle-orm/type-tests/singlestore/count.ts new file mode 100644 index 000000000..50abc8c3a --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/count.ts @@ -0,0 +1,61 @@ +import { Expect } from 'type-tests/utils.ts'; +import { and, gt, ne } from '~/expressions.ts'; +import { int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; +import type { Equal } from '~/utils.ts'; +import { db } from './db.ts'; + +const names = singlestoreTable('names', { + id: serial('id').primaryKey(), + name: text('name'), + authorId: int('author_id'), +}); + +const separate = await db.$count(names); + +const separateFilters = await db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))); + +const embedded = await db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: db.$count(names).as('count1'), + }) + .from(names); + +const embeddedFilters = await db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))).as('count1'), + }) + .from(names); + +Expect<Equal<number, typeof separate>>; + +Expect<Equal<number, typeof separateFilters>>; + +Expect< + Equal< + { + id: number; + name: string | null; + authorId: number | null; + count1: number; + }[], + typeof embedded + > +>; + +Expect< + Equal< + { + id: number; + name: string | null; + authorId: number | null; + count1: number; + }[], + typeof embeddedFilters + > +>; diff --git a/drizzle-orm/type-tests/singlestore/db.ts b/drizzle-orm/type-tests/singlestore/db.ts new file mode 100644 index 000000000..b314e504d --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/db.ts @@ -0,0 +1,13 @@ +import { createPool } from 'mysql2/promise'; +import { drizzle } from '~/singlestore/index.ts'; + +const pool = createPool({}); + +export const db = drizzle(pool); + +{ + drizzle(pool); + drizzle(pool, { schema: {} }); + drizzle(pool, { schema: {} }); + drizzle(pool, {}); +} diff --git a/drizzle-orm/type-tests/singlestore/delete.ts b/drizzle-orm/type-tests/singlestore/delete.ts new file mode 100644 index 000000000..db58ac2ec --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/delete.ts @@ -0,0 +1,65 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { eq } from '~/expressions.ts'; +import type { SingleStoreDelete } from '~/singlestore-core/index.ts'; +import type { SingleStoreRawQueryResult } from '~/singlestore/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const deleteAll = await db.delete(users); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteAll>>; + +const deleteAllStmt = db.delete(users).prepare(); +const deleteAllPrepared = await deleteAllStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteAllPrepared>>; + +const deleteWhere = await db.delete(users).where(eq(users.id, 1)); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteWhere>>; + +const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare(); +const deleteWherePrepared
= await deleteWhereStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteWherePrepared>>; + +const deleteReturningAll = await db.delete(users); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteReturningAll>>; + +const deleteReturningAllStmt = db.delete(users).prepare(); +const deleteReturningAllPrepared = await deleteReturningAllStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteReturningAllPrepared>>; + +const deleteReturningPartial = await db.delete(users); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteReturningPartial>>; + +const deleteReturningPartialStmt = db.delete(users).prepare(); +const deleteReturningPartialPrepared = await deleteReturningPartialStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof deleteReturningPartialPrepared>>; + +{ + function dynamic<T extends SingleStoreDelete>(qb: T) { + return qb.where(sql``); + } + + const qbBase = db.delete(users).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect<Equal<SingleStoreRawQueryResult, typeof result>>; +} + +{ + db + .delete(users) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); + + db + .delete(users) + .$dynamic() + .where(sql``) + .where(sql``); +} + +{ + db.delete(users).where(sql``).limit(1).orderBy(sql``); +} diff --git a/drizzle-orm/type-tests/singlestore/insert.ts b/drizzle-orm/type-tests/singlestore/insert.ts new file mode 100644 index 000000000..738bf669d --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/insert.ts @@ -0,0 +1,135 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { int, singlestoreTable, text } from '~/singlestore-core/index.ts'; +import type { SingleStoreInsert } from '~/singlestore-core/index.ts'; +import type { SingleStoreRawQueryResult } from '~/singlestore/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const singlestoreInsertReturning = await db.insert(users).values({ + // ^? + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).$returningId(); + +Expect<Equal<{ id: number }[], typeof singlestoreInsertReturning>>; + +const insert = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect<Equal<SingleStoreRawQueryResult, typeof insert>>; + +const insertStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertPrepared = await insertStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof insertPrepared>>; + +const insertSql = await db.insert(users).values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}); +Expect<Equal<SingleStoreRawQueryResult, typeof insertSql>>; + +const insertSqlStmt = db.insert(users).values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}).prepare(); +const insertSqlPrepared = await insertSqlStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof insertSqlPrepared>>; + +const insertReturning = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect<Equal<SingleStoreRawQueryResult, typeof insertReturning>>; + +const insertReturningStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertReturningPrepared = await insertReturningStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof insertReturningPrepared>>; + +const insertReturningPartial = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect<Equal<SingleStoreRawQueryResult, typeof insertReturningPartial>>; + +const insertReturningPartialStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof insertReturningPartialPrepared>>; + +const insertReturningSql = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}); +Expect<Equal<SingleStoreRawQueryResult, typeof insertReturningSql>>; + +const insertReturningSqlStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}).prepare(); +const insertReturningSqlPrepared = await insertReturningSqlStmt.execute(); +Expect<Equal<SingleStoreRawQueryResult, typeof insertReturningSqlPrepared>>; + +{ + const users = singlestoreTable('users', { + id: int('id').autoincrement().primaryKey(),
+ name: text('name').notNull(), + age: int('age'), + occupation: text('occupation'), + }); + + await db.insert(users).values({ name: 'John Wick', age: 58, occupation: 'housekeeper' }); +} + +{ + function dynamic<T extends SingleStoreInsert>(qb: T) { + return qb.onDuplicateKeyUpdate({ set: {} }); + } + + const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + + Expect<Equal<SingleStoreRawQueryResult, typeof result>>; +} + +{ + db + .insert(users) + .values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }) + .onDuplicateKeyUpdate({ set: {} }) + // @ts-expect-error method was already called + .onDuplicateKeyUpdate({ set: {} }); +} diff --git a/drizzle-orm/type-tests/singlestore/select.ts b/drizzle-orm/type-tests/singlestore/select.ts new file mode 100644 index 000000000..f7a5094ea --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/select.ts @@ -0,0 +1,607 @@ +import { + and, + between, + eq, + exists, + gt, + gte, + ilike, + inArray, + isNotNull, + isNull, + like, + lt, + lte, + ne, + not, + notBetween, + notExists, + notIlike, + notInArray, + notLike, + or, +} from '~/expressions.ts'; +import { alias } from '~/singlestore-core/alias.ts'; +import { param, sql } from '~/sql/sql.ts'; + +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { QueryBuilder, type SingleStoreSelect, type SingleStoreSelectQueryBuilder } from '~/singlestore-core/index.ts'; +import { db } from './db.ts'; +import { cities, classes, users } from './tables.ts'; + +const city = alias(cities, 'city'); +const city1 = alias(cities, 'city1'); + +const join = await db + .select({ + users, + cities, + city, + city1: { + id: city1.id, + }, + }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)) + .rightJoin(city, eq(city.id, users.id)) + .rightJoin(city1, eq(city1.id, users.id)); + +Expect< + Equal< + { + users: { + id: number; + text: string | null; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + } | null; + cities: { + id: number; + name: string; + population: number | null; + } | null; + city: { + id: number; + name: string; + population: number | null; + } | null; + city1: { + id: number; + }; + }[], + typeof join + > +>; + +const join2 = await db + .select({ + userId: users.id, + cityId: cities.id, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)); + +Expect< + Equal< + { + userId: number | null; + cityId: number | null; + }[], + typeof join2 + > +>; + +const join3 = await db + .select({ + userId: users.id, + cityId: cities.id, + classId: classes.id, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)) + .rightJoin(classes, eq(users.id, classes.id)); + +Expect< + Equal< + { + userId: number | null; + cityId: number | null; + classId: number; + }[], + typeof join3 + > +>; + +db + .select() + .from(users) + .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); + +function mapFunkyFuncResult(valueFromDriver: unknown) { + return { + foo: (valueFromDriver as Record<string, any>)['foo'], + }; +} + +const age = 1; + +const allOperators = await db + .select({ + col2: sql`5 - ${users.id} + 1`, // unknown + col3: sql<number>`${users.id} + 1`, // number + col33: sql`${users.id} + 1`.mapWith(users.id), // number + col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number + col4: sql<string | number>`one_or_another(${users.id}, ${users.class})`, // string
| number + col5: sql`true`, // unknown + col6: sql`true`, // boolean + col7: sql`random()`, // number + col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } + col9: sql`greatest(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // unknown + col10: sql`date_or_false(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // Date | boolean + col11: sql`${users.age1} + ${age}`, // unknown + col12: sql`${users.age1} + ${param(age, users.age1)}`, // unknown + col13: sql`lower(${users.class})`, // unknown + col14: sql`length(${users.class})`, // number + count: sql`count(*)::int`, // number + }) + .from(users) + .where(and( + eq(users.id, 1), + ne(users.id, 1), + or(eq(users.id, 1), ne(users.id, 1)), + not(eq(users.id, 1)), + gt(users.id, 1), + gte(users.id, 1), + lt(users.id, 1), + lte(users.id, 1), + inArray(users.id, [1, 2, 3]), + inArray(users.id, db.select({ id: users.id }).from(users)), + inArray(users.id, sql`select id from ${users}`), + notInArray(users.id, [1, 2, 3]), + notInArray(users.id, db.select({ id: users.id }).from(users)), + notInArray(users.id, sql`select id from ${users}`), + isNull(users.subClass), + isNotNull(users.id), + exists(db.select({ id: users.id }).from(users)), + exists(sql`select id from ${users}`), + notExists(db.select({ id: users.id }).from(users)), + notExists(sql`select id from ${users}`), + between(users.id, 1, 2), + notBetween(users.id, 1, 2), + like(users.id, '%1%'), + notLike(users.id, '%1%'), + ilike(users.id, '%1%'), + notIlike(users.id, '%1%'), + )); + +Expect< + Equal<{ + col2: unknown; + col3: number; + col33: number; + col34: { foo: any }; + col4: string | number; + col5: unknown; + col6: boolean; + col7: number; + col8: { + foo: any; + }; + col9: unknown; + col10: boolean | Date; + col11: unknown; + col12: unknown; + col13: unknown; + col14: number; + count: number; + }[], typeof allOperators> +>; + +const textSelect = await db + .select({ + t: users.text, + }) + .from(users); + +Expect>; + +const homeCity = alias(cities, 'homeCity'); +const c = alias(classes, 'c'); +const otherClass = alias(classes, 'otherClass'); +const anotherClass = alias(classes, 'anotherClass'); +const friend = alias(users, 'friend'); +const currentCity = alias(cities, 'currentCity'); +const subscriber = alias(users, 'subscriber'); +const closestCity = alias(cities, 'closestCity'); + +const megaJoin = await db + .select({ + user: { + id: users.id, + maxAge: sql`max(${users.age1})`, + }, + city: { + id: cities.id, + }, + homeCity, + c, + otherClass, + anotherClass, + friend, + currentCity, + subscriber, + closestCity, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) + .innerJoin(c, eq(c.id, users.class)) + .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) + .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) + .innerJoin(friend, sql`${users.id} = ${friend.id}`) + .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) + .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) + .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) + .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) + .limit(1) + .offset(1); + +Expect< + Equal< + { + user: { + id: number; + maxAge: unknown; + }; + city: { + id: number; + }; + homeCity: { + id: number; + name: string; + population: number | null; + }; + c: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + otherClass: { + id: 
number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + anotherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + currentCity: { + id: number; + name: string; + population: number | null; + }; + subscriber: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + closestCity: { + id: number; + name: string; + population: number | null; + }; + }[], + typeof megaJoin + > +>; + +const friends = alias(users, 'friends'); + +const join4 = await db + .select({ + user: { + id: users.id, + }, + city: { + id: cities.id, + }, + class: classes, + friend: friends, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(classes, sql`${cities.id} = ${classes.id}`) + .innerJoin(friends, sql`${friends.id} = ${users.id}`) + .where(sql`${users.age1} > 0`); + +Expect< + Equal<{ + user: { + id: number; + }; + city: { + id: number; + }; + class: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + }[], typeof join4> +>; + +{ + const authenticated = false as boolean; + + const result = await db + .select({ + id: users.id, + ...(authenticated ? 
{ city: users.homeCity } : {}), + }) + .from(users); + + Expect< + Equal< + { + id: number; + city?: number; + }[], + typeof result + > + >; +} + +await db.select().from(users).for('update'); +await db.select().from(users).for('share', { skipLocked: true }); +await db.select().from(users).for('update', { noWait: true }); +// TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) +/* await db + .select() + .from(users) + // @ts-expect-error - can't use both skipLocked and noWait + .for('share', { noWait: true, skipLocked: true }); + +{ + const result = await db.select().from(newYorkers); + Expect< + Equal< + { + userId: number; + cityId: number | null; + }[], + typeof result + > + >; +} + +{ + const result = await db.select({ userId: newYorkers.userId }).from(newYorkers); + Expect< + Equal< + { + userId: number; + }[], + typeof result + > + >; +} */ + +{ + const query = db.select().from(users).prepare().iterator(); + for await (const row of query) { + Expect>(); + } +} + +{ + db + .select() + .from(users) + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + .limit(10) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); + } + + const qb = db.select().from(users).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +{ + // TODO: add to docs + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); + } + + const query = new QueryBuilder().select().from(users).$dynamic(); + dynamic(query); +} + +{ + // TODO: add to docs + function paginated(qb: T, page: number) { + return qb.limit(10).offset((page - 1) * 10); + } + + const qb = db.select().from(users).$dynamic(); + const result = await paginated(qb, 1); + + Expect>; +} + +{ + db + .select() + .from(users) + .where(sql``) + .limit(10) + // @ts-expect-error method was already called + .where(sql``); + + db + .select() + .from(users) + .having(sql``) + .limit(10) + // @ts-expect-error method was already called + .having(sql``); + + db + .select() + .from(users) + 
.groupBy(sql``) + .limit(10) + // @ts-expect-error method was already called + .groupBy(sql``); + + db + .select() + .from(users) + .orderBy(sql``) + .limit(10) + // @ts-expect-error method was already called + .orderBy(sql``); + + db + .select() + .from(users) + .limit(10) + .where(sql``) + // @ts-expect-error method was already called + .limit(10); + + db + .select() + .from(users) + .offset(10) + .limit(10) + // @ts-expect-error method was already called + .offset(10); + + db + .select() + .from(users) + .for('update') + .limit(10) + // @ts-expect-error method was already called + .for('update'); +} diff --git a/drizzle-orm/type-tests/singlestore/set-operators.ts b/drizzle-orm/type-tests/singlestore/set-operators.ts new file mode 100644 index 000000000..1db4bb7f1 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/set-operators.ts @@ -0,0 +1,235 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { eq } from '~/expressions.ts'; +import { intersect, type SingleStoreSetOperator, union, unionAll } from '~/singlestore-core/index.ts'; +import { sql } from '~/sql/index.ts'; +import { db } from './db.ts'; +import { cities, classes, users } from './tables.ts'; + +const unionTest = await db + .select({ id: users.id }) + .from(users) + .union( + db + .select({ id: users.id }) + .from(users), + ); + +Expect>; + +const unionAllTest = await db + .select({ id: users.id, age: users.age1 }) + .from(users) + .unionAll( + db.select({ id: users.id, age: users.age1 }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)), + ); + +Expect>; + +const intersectTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .intersect(({ intersect }) => + intersect( + db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users), + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ) + ); + +Expect>; + +const exceptTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .except( + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ); + +Expect>; + +const minusTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .minus( + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ); + +Expect>; + +const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); + +Expect>; + +const unionAll2Test = await unionAll( + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select().from(cities), +); + +Expect>; + +const intersect2Test = await intersect( + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), +); + +Expect>; + +// TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) +/* const except2Test = await except( + db.select({ + userId: newYorkers.userId, + }) + .from(newYorkers), + db.select({ + userId: newYorkers.userId, + }).from(newYorkers), +); + +Expect>; */ + +const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); + +Expect< + Equal<{ + id: number; + text: 
string | null; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }[], typeof unionfull> +>; + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + // @ts-expect-error - method was already called + .orderBy(sql``); + +union(db.select().from(users), db.select().from(users)) + .offset(1) + // @ts-expect-error - method was already called + .offset(2); + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + // @ts-expect-error - method was already called + .orderBy(sql``); + +{ + function dynamic(qb: T) { + return qb.orderBy(sql``).limit(1).offset(2); + } + + const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + db.select({ id: cities.id, name: cities.name }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select().from(cities), +); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), + db.select({ id: cities.id, name: cities.name }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and 
the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + /* db.select({ id: newYorkers.userId }).from(newYorkers), */ + db.select({ id: cities.id }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities).where(sql``), + db.select({ id: sql`${cities.id}` }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), +); diff --git a/drizzle-orm/type-tests/singlestore/subquery.ts b/drizzle-orm/type-tests/singlestore/subquery.ts new file mode 100644 index 000000000..e8ee4e80b --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/subquery.ts @@ -0,0 +1,97 @@ +import { Expect } from 'type-tests/utils.ts'; +import { and, eq } from '~/expressions.ts'; +import { alias, int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; +import { sql } from '~/sql/sql.ts'; +import type { DrizzleTypeError, Equal } from '~/utils.ts'; +import { db } from './db.ts'; + +const names = singlestoreTable('names', { + id: serial('id').primaryKey(), + name: text('name'), + authorId: int('author_id'), +}); + +const n1 = db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: sql`count(1)::int`.as('count1'), + }) + .from(names) + .groupBy(names.id, names.name, names.authorId) + .as('n1'); + +const n2 = db + .select({ + id: names.id, + authorId: names.authorId, + totalCount: sql`count(1)::int`.as('totalCount'), + }) + .from(names) + .groupBy(names.id, names.authorId) + .as('n2'); + +const result = await db + .select({ + name: n1.name, + authorId: n1.authorId, + count1: n1.count1, + totalCount: n2.totalCount, + }) + .from(n1) + .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); + +Expect< + Equal< + { + name: string | null; + authorId: number | null; + count1: number; + totalCount: number; + }[], + typeof result + > +>; + +const names2 = alias(names, 'names2'); + +const sq1 = db + .select({ + id: names.id, + name: names.name, + id2: names2.id, + }) + .from(names) + .leftJoin(names2, eq(names.name, names2.name)) + .as('sq1'); + +const res = await db.select().from(sq1); + +Expect< + Equal< + { + id: number; + name: string | null; + id2: number | null; + }[], + typeof res + > +>; + +{ + const sq = db.select({ count: sql`count(1)::int` }).from(names).as('sq'); + Expect ? 
true : false>; +} + +const sqUnion = db.select().from(names).union(db.select().from(names2)).as('sqUnion'); + +const resUnion = await db.select().from(sqUnion); + +Expect< + Equal<{ + id: number; + name: string | null; + authorId: number | null; + }[], typeof resUnion> +>; diff --git a/drizzle-orm/type-tests/singlestore/tables.ts b/drizzle-orm/type-tests/singlestore/tables.ts new file mode 100644 index 000000000..43a1b05dc --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/tables.ts @@ -0,0 +1,1009 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import type { BuildColumn } from '~/column-builder.ts'; +import { eq } from '~/expressions.ts'; +import { + bigint, + binary, + boolean, + char, + customType, + date, + datetime, + decimal, + double, + float, + index, + int, + json, + longtext, + mediumint, + mediumtext, + primaryKey, + real, + serial, + type SingleStoreColumn, + singlestoreEnum, + singlestoreTable, + smallint, + text, + time, + timestamp, + tinyint, + tinytext, + unique, + uniqueIndex, + varbinary, + varchar, + year, +} from '~/singlestore-core/index.ts'; +import { singlestoreSchema } from '~/singlestore-core/schema.ts'; +/* import { singlestoreView, type SingleStoreViewWithSelection } from '~/singlestore-core/view.ts'; */ +import type { InferSelectModel } from '~/table.ts'; +import type { Simplify } from '~/utils.ts'; +import { db } from './db.ts'; + +export const users = singlestoreTable( + 'users_table', + { + id: serial('id').primaryKey(), + homeCity: int('home_city') + .notNull(), + currentCity: int('current_city'), + serialNullable: serial('serial1'), + serialNotNull: serial('serial2').notNull(), + class: text('class', { enum: ['A', 'C'] }).notNull(), + subClass: text('sub_class', { enum: ['B', 'D'] }), + text: text('text'), + age1: int('age1').notNull(), + createdAt: timestamp('created_at', { mode: 'date' }).notNull().defaultNow(), + enumCol: singlestoreEnum('enum_col', ['a', 'b', 'c']).notNull(), + }, + (users) => ({ + usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), + usersAge2Idx: index('usersAge2Idx').on(users.class), + uniqueClass: uniqueIndex('uniqueClass') + .on(users.class, users.subClass) + .lock('default') + .algorythm('copy') + .using(`btree`), + pk: primaryKey(users.age1, users.class), + }), +); + +export const cities = singlestoreTable('cities_table', { + id: serial('id').primaryKey(), + name: text('name_db').notNull(), + population: int('population').default(0), +}, (cities) => ({ + citiesNameIdx: index('citiesNameIdx').on(cities.id), +})); + +Expect< + Equal< + { + id: SingleStoreColumn<{ + name: 'id'; + tableName: 'cities_table'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: undefined; + }, object>; + name: SingleStoreColumn<{ + name: 'name_db'; + tableName: 'cities_table'; + dataType: 'string'; + columnType: 'SingleStoreText'; + data: string; + driverParam: string; + notNull: true; + hasDefault: false; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: [string, ...string[]]; + baseColumn: never; + identity: undefined; + generated: undefined; + }, object>; + population: SingleStoreColumn<{ + name: 'population'; + tableName: 'cities_table'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + notNull: 
false; + hasDefault: true; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: undefined; + }, object>; + }, + typeof cities._.columns + > +>; + +Expect< + Equal<{ + id: number; + name_db: string; + population: number | null; + }, InferSelectModel> +>; + +Expect< + Equal<{ + id?: number; + name: string; + population?: number | null; + }, typeof cities.$inferInsert> +>; + +export const customSchema = singlestoreSchema('custom_schema'); + +export const citiesCustom = customSchema.table('cities_table', { + id: serial('id').primaryKey(), + name: text('name_db').notNull(), + population: int('population').default(0), +}, (cities) => ({ + citiesNameIdx: index('citiesNameIdx').on(cities.id), +})); + +Expect>; + +export const classes = singlestoreTable('classes_table', ({ serial, text }) => ({ + id: serial('id').primaryKey(), + class: text('class', { enum: ['A', 'C'] }), + subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), +})); + +// TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) +/* export const classes2 = singlestoreTable('classes_table', { + id: serial().primaryKey(), + class: text({ enum: ['A', 'C'] }).$dbName('class_db'), + subClass: text({ enum: ['B', 'D'] }).notNull(), +}); */ + +/* export const newYorkers = singlestoreView('new_yorkers') + .algorithm('merge') + .sqlSecurity('definer') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + +Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: false; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > +>; + +{ + const newYorkers = customSchema.view('new_yorkers') + .algorithm('merge') + .sqlSecurity('definer') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 
'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: false; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = singlestoreView('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }) + .algorithm('merge') + .sqlSecurity('definer') + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = customSchema.view('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }) + .algorithm('merge') + .sqlSecurity('definer') + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = singlestoreView('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }).existing(); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', true, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: 
undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = customSchema.view('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }).existing(); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', true, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} */ + +{ + const customText = customType<{ data: string }>({ + dataType() { + return 'text'; + }, + }); + + const t = customText('name').notNull(); + Expect< + Equal< + { + name: 'name'; + tableName: 'table'; + dataType: 'custom'; + columnType: 'SingleStoreCustomColumn'; + data: string; + driverParam: unknown; + notNull: true; + hasDefault: false; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: undefined; + brand: 'Column'; + dialect: 'singlestore'; + }, + Simplify['_']> + > + >; +} + +{ + singlestoreTable('test', { + bigint: bigint('bigint', { mode: 'bigint' }), + number: bigint('number', { mode: 'number' }), + date: date('date').default(new Date()), + date2: date('date2', { mode: 'date' }).default(new Date()), + date3: date('date3', { mode: 'string' }).default('2020-01-01'), + date4: date('date4', { mode: undefined }).default(new Date()), + datetime: datetime('datetime').default(new Date()), + datetime2: datetime('datetime2', { mode: 'date' }).default(new Date()), + datetime3: datetime('datetime3', { mode: 'string' }).default('2020-01-01'), + datetime4: datetime('datetime4', { mode: undefined }).default(new Date()), + timestamp: timestamp('timestamp').default(new Date()), + timestamp2: timestamp('timestamp2', { mode: 'date' }).default(new Date()), + timestamp3: timestamp('timestamp3', { mode: 'string' }).default('2020-01-01'), + timestamp4: timestamp('timestamp4', { mode: undefined }).default(new Date()), + }); +} + +{ + singlestoreTable('test', { + col1: decimal('col1').default('1'), + }); +} + +{ + const test = singlestoreTable('test', { + test1: singlestoreEnum('test', ['a', 'b', 'c'] as const).notNull(), + test2: singlestoreEnum('test', ['a', 'b', 'c']).notNull(), + test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).notNull(), + test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).notNull(), + test5: text('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test6: text('test', { enum: ['a', 'b', 'c'] }).notNull(), + test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test8: tinytext('test', { enum: ['a', 'b', 'c'] }).notNull(), + test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).notNull(), + 
test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test12: longtext('test', { enum: ['a', 'b', 'c'] }).notNull(), + test13: char('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test14: char('test', { enum: ['a', 'b', 'c'] }).notNull(), + test15: text('test').notNull(), + }); + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; +} + +/* { // All types with generated columns + const test = singlestoreTable('test', { + test1: singlestoreEnum('test', ['a', 'b', 'c'] as const).generatedAlwaysAs(sql``), + test2: singlestoreEnum('test', ['a', 'b', 'c']).generatedAlwaysAs(sql``), + test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test5: text('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test6: text('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test8: tinytext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test12: longtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test13: char('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test14: char('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test15: text('test').generatedAlwaysAs(sql``), + }); + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; +} */ + +{ + const getUsersTable = (schemaName: TSchema) => { + return singlestoreSchema(schemaName).table('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + }; + + const users1 = getUsersTable('id1'); + Expect>; + + const users2 = getUsersTable('id2'); + Expect>; +} + +{ + const internalStaff = singlestoreTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = singlestoreTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = singlestoreTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin( + customUser, + eq(internalStaff.userId, customUser.id), + ).as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + Expect< + Equal<{ + internal_staff: { + internal_staff: { + userId: number; + }; + custom_user: { + id: number | null; + }; + } | null; + ticket: { + staffId: number; + }; + }[], typeof mainQuery> + >; +} + +// TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) +/* { + const newYorkers = singlestoreView('new_yorkers') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + await 
db.select().from(newYorkers).leftJoin(newYorkers, eq(newYorkers.userId, newYorkers.userId)); +} */ + +{ + const test = singlestoreTable('test', { + id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), + }); + + Expect< + Equal<{ + id?: string; + }, typeof test.$inferInsert> + >; +} + +{ + singlestoreTable('test', { + id: int('id').$default(() => 1), + id2: int('id').$defaultFn(() => 1), + // @ts-expect-error - should be number + id3: int('id').$default(() => '1'), + // @ts-expect-error - should be number + id4: int('id').$defaultFn(() => '1'), + }); +} +{ + const emailLog = singlestoreTable( + 'email_log', + { + id: int('id', { unsigned: true }).autoincrement().notNull(), + clientId: int('id_client', { unsigned: true }), + receiverEmail: varchar('receiver_email', { length: 255 }).notNull(), + messageId: varchar('message_id', { length: 255 }), + contextId: int('context_id', { unsigned: true }), + contextType: singlestoreEnum('context_type', ['test']).$type<['test']>(), + action: varchar('action', { length: 80 }).$type<['test']>(), + events: json('events').$type<{ t: 'test' }[]>(), + createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().onUpdateNow(), + }, + (table) => { + return { + emailLogId: primaryKey({ columns: [table.id], name: 'email_log_id' }), + emailLogMessageIdUnique: unique('email_log_message_id_unique').on(table.messageId), + }; + }, + ); + + Expect< + Equal<{ + receiverEmail: string; + id?: number | undefined; + createdAt?: string | undefined; + clientId?: number | null | undefined; + messageId?: string | null | undefined; + contextId?: number | null | undefined; + contextType?: ['test'] | null | undefined; + action?: ['test'] | null | undefined; + events?: + | { + t: 'test'; + }[] + | null + | undefined; + updatedAt?: string | null | undefined; + }, typeof emailLog.$inferInsert> + >; +} + +{ + const customRequiredConfig = customType<{ + data: string; + driverData: string; + config: { length: number }; + configRequired: true; + }>({ + dataType(config) { + Expect>; + return `varchar(${config.length})`; + }, + + toDriver(value) { + Expect>(); + return value; + }, + + fromDriver(value) { + Expect>(); + return value; + }, + }); + + customRequiredConfig('t', { length: 10 }); + customRequiredConfig({ length: 10 }); + // @ts-expect-error - config is required + customRequiredConfig('t'); + // @ts-expect-error - config is required + customRequiredConfig(); +} + +{ + const customOptionalConfig = customType<{ + data: string; + driverData: string; + config: { length: number }; + }>({ + dataType(config) { + Expect>; + return config ? 
`varchar(${config.length})` : `text`; + }, + + toDriver(value) { + Expect>(); + return value; + }, + + fromDriver(value) { + Expect>(); + return value; + }, + }); + + customOptionalConfig('t', { length: 10 }); + customOptionalConfig('t'); + customOptionalConfig({ length: 10 }); + customOptionalConfig(); +} + +{ + singlestoreTable('all_columns', { + bigint: bigint('bigint', { mode: 'number' }), + bigint2: bigint('bigint', { mode: 'number', unsigned: true }), + bigintdef: bigint('bigintdef', { mode: 'number' }).default(0), + binary: binary('binary'), + binary1: binary('binary1', { length: 1 }), + binarydef: binary('binarydef').default(''), + boolean: boolean('boolean'), + booleandef: boolean('booleandef').default(false), + char: char('char'), + char2: char('char2', { length: 1 }), + char3: char('char3', { enum: ['a', 'b', 'c'] }), + char4: char('char4', { length: 1, enum: ['a', 'b', 'c'] }), + chardef: char('chardef').default(''), + date: date('date'), + date2: date('date2', { mode: 'string' }), + datedef: date('datedef').default(new Date()), + datetime: datetime('datetime'), + datetime2: datetime('datetime2', { mode: 'string' }), + datetimedef: datetime('datetimedef').default(new Date()), + decimal: decimal('decimal'), + decimal2: decimal('decimal2', { precision: 10 }), + decimal3: decimal('decimal3', { scale: 2 }), + decimal4: decimal('decimal4', { precision: 10, scale: 2 }), + decimaldef: decimal('decimaldef').default('0'), + double: double('double'), + double2: double('double2', { precision: 10 }), + double3: double('double3', { scale: 2 }), + double4: double('double4', { precision: 10, scale: 2 }), + doubledef: double('doubledef').default(0), + enum: singlestoreEnum('enum', ['a', 'b', 'c']), + enumdef: singlestoreEnum('enumdef', ['a', 'b', 'c']).default('a'), + float: float('float'), + float2: float('float2', { precision: 10 }), + float3: float('float3', { scale: 2 }), + float4: float('float4', { precision: 10, scale: 2 }), + floatdef: float('floatdef').default(0), + int: int('int'), + int2: int('int2', { unsigned: true }), + intdef: int('intdef').default(0), + json: json('json'), + jsondef: json('jsondef').default({}), + mediumint: mediumint('mediumint'), + mediumint2: mediumint('mediumint2', { unsigned: true }), + mediumintdef: mediumint('mediumintdef').default(0), + real: real('real'), + real2: real('real2', { precision: 10 }), + real3: real('real3', { scale: 2 }), + real4: real('real4', { precision: 10, scale: 2 }), + realdef: real('realdef').default(0), + serial: serial('serial'), + serialdef: serial('serialdef').default(0), + smallint: smallint('smallint'), + smallint2: smallint('smallint2', { unsigned: true }), + smallintdef: smallint('smallintdef').default(0), + text: text('text'), + text2: text('text2', { enum: ['a', 'b', 'c'] }), + textdef: text('textdef').default(''), + tinytext: tinytext('tinytext'), + tinytext2: tinytext('tinytext2', { enum: ['a', 'b', 'c'] }), + tinytextdef: tinytext('tinytextdef').default(''), + mediumtext: mediumtext('mediumtext'), + mediumtext2: mediumtext('mediumtext2', { enum: ['a', 'b', 'c'] }), + mediumtextdef: mediumtext('mediumtextdef').default(''), + longtext: longtext('longtext'), + longtext2: longtext('longtext2', { enum: ['a', 'b', 'c'] }), + longtextdef: longtext('longtextdef').default(''), + time: time('time'), + timedef: time('timedef').default('00:00:00'), + timestamp: timestamp('timestamp'), + timestamp2: timestamp('timestamp2', { mode: 'string' }), + timestamp3: timestamp('timestamp3', { mode: 'string' }), + timestamp4: 
timestamp('timestamp4'), + timestampdef: timestamp('timestampdef').default(new Date()), + tinyint: tinyint('tinyint'), + tinyint2: tinyint('tinyint2', { unsigned: true }), + tinyintdef: tinyint('tinyintdef').default(0), + varbinary: varbinary('varbinary', { length: 1 }), + varbinarydef: varbinary('varbinarydef', { length: 1 }).default(''), + varchar: varchar('varchar', { length: 1 }), + varchar2: varchar('varchar2', { length: 1, enum: ['a', 'b', 'c'] }), + varchardef: varchar('varchardef', { length: 1 }).default(''), + year: year('year'), + yeardef: year('yeardef').default(0), + }); +} + +{ + const keysAsColumnNames = singlestoreTable('test', { + id: int(), + name: text(), + }); + + Expect>; + Expect>; +} + +{ + singlestoreTable('all_columns_without_name', { + bigint: bigint({ mode: 'number' }), + bigint2: bigint({ mode: 'number', unsigned: true }), + bigintdef: bigint({ mode: 'number' }).default(0), + binary: binary(), + binrary1: binary({ length: 1 }), + binarydef: binary().default(''), + boolean: boolean(), + booleandef: boolean().default(false), + char: char(), + char2: char({ length: 1 }), + char3: char({ enum: ['a', 'b', 'c'] }), + char4: char({ length: 1, enum: ['a', 'b', 'c'] }), + chardef: char().default(''), + date: date(), + date2: date({ mode: 'string' }), + datedef: date('datedef').default(new Date()), + datetime: datetime(), + datetime2: datetime({ mode: 'string' }), + datetimedef: datetime('datetimedef').default(new Date()), + decimal: decimal(), + decimal2: decimal({ precision: 10 }), + decimal3: decimal({ scale: 2 }), + decimal4: decimal({ precision: 10, scale: 2 }), + decimaldef: decimal('decimaldef').default('0'), + double: double(), + double2: double({ precision: 10 }), + double3: double({ scale: 2 }), + double4: double({ precision: 10, scale: 2 }), + doubledef: double().default(0), + enum: singlestoreEnum(['a', 'b', 'c']), + enumdef: singlestoreEnum(['a', 'b', 'c']).default('a'), + float: float(), + float2: float({ precision: 10 }), + float3: float({ scale: 2 }), + float4: float({ precision: 10, scale: 2 }), + floatdef: float().default(0), + int: int(), + int2: int({ unsigned: true }), + intdef: int().default(0), + json: json(), + jsondef: json().default({}), + mediumint: mediumint(), + mediumint2: mediumint({ unsigned: true }), + mediumintdef: mediumint().default(0), + real: real(), + real2: real({ precision: 10 }), + real3: real({ scale: 2 }), + real4: real({ precision: 10, scale: 2 }), + realdef: real().default(0), + serial: serial(), + serialdef: serial().default(0), + smallint: smallint(), + smallint2: smallint({ unsigned: true }), + smallintdef: smallint().default(0), + text: text(), + text2: text({ enum: ['a', 'b', 'c'] }), + textdef: text().default(''), + tinytext: tinytext(), + tinytext2: tinytext({ enum: ['a', 'b', 'c'] }), + tinytextdef: tinytext().default(''), + mediumtext: mediumtext(), + mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }), + mediumtextdef: mediumtext().default(''), + longtext: longtext(), + longtext2: longtext({ enum: ['a', 'b', 'c'] }), + longtextdef: longtext().default(''), + time: time(), + timedef: time().default('00:00:00'), + timestamp: timestamp(), + timestamp2: timestamp({ mode: 'string' }), + timestamp3: timestamp({ mode: 'string' }), + timestamp4: timestamp(), + timestampdef: timestamp().default(new Date()), + tinyint: tinyint(), + tinyint2: tinyint({ unsigned: true }), + tinyintdef: tinyint().default(0), + varbinary: varbinary({ length: 1 }), + varbinarydef: varbinary({ length: 1 }).default(''), + varchar: varchar({ length: 1 
}), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }), + varchardef: varchar({ length: 1 }).default(''), + year: year(), + yeardef: year().default(0), + }); +} diff --git a/drizzle-orm/type-tests/singlestore/update.ts b/drizzle-orm/type-tests/singlestore/update.ts new file mode 100644 index 000000000..4fb5497cf --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/update.ts @@ -0,0 +1,30 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import type { SingleStoreUpdate } from '~/singlestore-core/index.ts'; +import type { SingleStoreRawQueryResult } from '~/singlestore/session.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +{ + function dynamic(qb: T) { + return qb.where(sql``); + } + + const qbBase = db.update(users).set({}).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .update(users) + .set({}) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); +} + +{ + db.update(users).set({}).where(sql``).limit(1).orderBy(sql``); +} diff --git a/drizzle-orm/type-tests/singlestore/with.ts b/drizzle-orm/type-tests/singlestore/with.ts new file mode 100644 index 000000000..4233fbbf1 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/with.ts @@ -0,0 +1,80 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { gt, inArray } from '~/expressions.ts'; +import { int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; + +const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), + /* generated: text('generatedText').generatedAlwaysAs(sql``), */ +}); + +{ + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: orders.region, + totalSales: orders.amount, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})`, + productSales: sql`sum(${orders.amount})`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); + + Expect< + Equal<{ + region: string; + product: string; + productUnits: number; + productSales: number; + }[], typeof result> + >; + + const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); + const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); + + Expect< + Equal<{ + id: number; + region: string; + product: string; + amount: number; + quantity: number; + /* generated: string | null; */ + }[], typeof allFromWith> + >; +} diff --git a/integration-tests/.env.example b/integration-tests/.env.example index ceff7d132..cad737330 100644 --- a/integration-tests/.env.example +++ b/integration-tests/.env.example @@ -1,5 +1,6 @@ PG_CONNECTION_STRING="postgres://postgres:postgres@localhost:55432/postgres" 
MYSQL_CONNECTION_STRING="mysql://root:mysql@127.0.0.1:33306/drizzle" +SINGLESTORE_CONNECTION_STRING="singlestore://root:singlestore@localhost:3306/drizzle" PLANETSCALE_CONNECTION_STRING= TIDB_CONNECTION_STRING= NEON_CONNECTION_STRING= diff --git a/integration-tests/drizzle2/singlestore/0000_nostalgic_carnage.sql b/integration-tests/drizzle2/singlestore/0000_nostalgic_carnage.sql new file mode 100644 index 000000000..50efe47da --- /dev/null +++ b/integration-tests/drizzle2/singlestore/0000_nostalgic_carnage.sql @@ -0,0 +1,20 @@ +CREATE TABLE `cities_migration` ( + `id` int, + `fullname_name` text, + `state` text +); +--> statement-breakpoint +CREATE TABLE `users_migration` ( + `id` int PRIMARY KEY NOT NULL, + `full_name` text, + `phone` int, + `invited_by` int, + `city_id` int, + `date` timestamp DEFAULT now() +); +--> statement-breakpoint +CREATE TABLE `users12` ( + `id` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, + `name` text NOT NULL, + `email` text NOT NULL +); diff --git a/integration-tests/drizzle2/singlestore/meta/0000_snapshot.json b/integration-tests/drizzle2/singlestore/meta/0000_snapshot.json new file mode 100644 index 000000000..63d5ad187 --- /dev/null +++ b/integration-tests/drizzle2/singlestore/meta/0000_snapshot.json @@ -0,0 +1,132 @@ +{ + "version": "1", + "dialect": "singlestore", + "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "cities_migration": { + "name": "cities_migration", + "columns": { + "id": { + "name": "id", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "fullname_name": { + "name": "fullname_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "state": { + "name": "state", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {} + }, + "users_migration": { + "name": "users_migration", + "columns": { + "id": { + "name": "id", + "type": "int", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "full_name": { + "name": "full_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "phone": { + "name": "phone", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "invited_by": { + "name": "invited_by", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "city_id": { + "name": "city_id", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "date": { + "name": "date", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {} + }, + "users12": { + "name": "users12", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "my_unique_index": { + "name": "my_unique_index", + "columns": [ + "name" + ], + "isUnique": true, + "using": "btree" + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {} + } + }, + "schemas": {}, + "_meta": { + "schemas": {}, + "tables": {}, + 
"columns": {} + } +} diff --git a/integration-tests/drizzle2/singlestore/meta/_journal.json b/integration-tests/drizzle2/singlestore/meta/_journal.json new file mode 100644 index 000000000..49e74f169 --- /dev/null +++ b/integration-tests/drizzle2/singlestore/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "1", + "dialect": "singlestore", + "entries": [ + { + "idx": 0, + "version": "1", + "when": 1680270921944, + "tag": "0000_nostalgic_carnage", + "breakpoints": true + } + ] +} diff --git a/integration-tests/tests/relational/singlestore.schema.ts b/integration-tests/tests/relational/singlestore.schema.ts new file mode 100644 index 000000000..ca3386ba0 --- /dev/null +++ b/integration-tests/tests/relational/singlestore.schema.ts @@ -0,0 +1,106 @@ +import { bigint, boolean, primaryKey, serial, singlestoreTable, text, timestamp } from 'drizzle-orm/singlestore-core'; + +import { relations } from 'drizzle-orm'; + +export const usersTable = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + invitedBy: bigint('invited_by', { mode: 'number' }), +}); +export const usersConfig = relations(usersTable, ({ one, many }) => ({ + invitee: one(usersTable, { + fields: [usersTable.invitedBy], + references: [usersTable.id], + }), + usersToGroups: many(usersToGroupsTable), + posts: many(postsTable), + comments: many(commentsTable), +})); + +export const groupsTable = singlestoreTable('groups', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + description: text('description'), +}); +export const groupsConfig = relations(groupsTable, ({ many }) => ({ + usersToGroups: many(usersToGroupsTable), +})); + +export const usersToGroupsTable = singlestoreTable( + 'users_to_groups', + { + id: serial('id').primaryKey(), + userId: bigint('user_id', { mode: 'number' }).notNull(), + groupId: bigint('group_id', { mode: 'number' }).notNull(), + }, + (t) => ({ + pk: primaryKey(t.userId, t.groupId), + }), +); +export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ + group: one(groupsTable, { + fields: [usersToGroupsTable.groupId], + references: [groupsTable.id], + }), + user: one(usersTable, { + fields: [usersToGroupsTable.userId], + references: [usersTable.id], + }), +})); + +export const postsTable = singlestoreTable('posts', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + ownerId: bigint('owner_id', { mode: 'number' }), + createdAt: timestamp('created_at') + .notNull() + .defaultNow(), +}); +export const postsConfig = relations(postsTable, ({ one, many }) => ({ + author: one(usersTable, { + fields: [postsTable.ownerId], + references: [usersTable.id], + }), + comments: many(commentsTable), +})); + +export const commentsTable = singlestoreTable('comments', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + creator: bigint('creator', { mode: 'number' }), + postId: bigint('post_id', { mode: 'number' }), + createdAt: timestamp('created_at') + .notNull() + .defaultNow(), +}); +export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ + post: one(postsTable, { + fields: [commentsTable.postId], + references: [postsTable.id], + }), + author: one(usersTable, { + fields: [commentsTable.creator], + references: [usersTable.id], + }), + likes: many(commentLikesTable), +})); + +export const commentLikesTable = singlestoreTable('comment_likes', { + id: serial('id').primaryKey(), + creator: bigint('creator', { mode: 'number' }), 
+ commentId: bigint('comment_id', { mode: 'number' }), + createdAt: timestamp('created_at') + .notNull() + .defaultNow(), +}); +export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ + comment: one(commentsTable, { + fields: [commentLikesTable.commentId], + references: [commentsTable.id], + }), + author: one(usersTable, { + fields: [commentLikesTable.creator], + references: [usersTable.id], + }), +})); diff --git a/integration-tests/tests/relational/singlestore.test.ts b/integration-tests/tests/relational/singlestore.test.ts new file mode 100644 index 000000000..0e20a7046 --- /dev/null +++ b/integration-tests/tests/relational/singlestore.test.ts @@ -0,0 +1,6403 @@ +// import retry from 'async-retry'; +// import Docker from 'dockerode'; +// import 'dotenv/config'; +// import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; +// import { drizzle, type SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +// import getPort from 'get-port'; +// import * as mysql from 'mysql2/promise'; +// import { v4 as uuid } from 'uuid'; +// import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; +// import * as schema from './singlestore.schema.ts'; + +// const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; + +// const ENABLE_LOGGING = false; + +// /* +// Test cases: +// - querying nested relation without PK with additional fields +// */ + +// declare module 'vitest' { +// export interface TestContext { +// docker: Docker; +// singlestoreContainer: Docker.Container; +// singlestoreDb: SingleStoreDriverDatabase; +// singlestoreClient: mysql.Connection; +// } +// } + +// let globalDocker: Docker; +// let singlestoreContainer: Docker.Container; +// let db: SingleStoreDriverDatabase; +// let client: mysql.Connection; + +// async function createDockerDB(): Promise { +// const docker = new Docker(); +// const port = await getPort({ port: 3306 }); +// const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + +// const pullStream = await docker.pull(image); +// await new Promise((resolve, reject) => +// docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) +// ); + +// singlestoreContainer = await docker.createContainer({ +// Image: image, +// Env: ['ROOT_PASSWORD=singlestore'], +// name: `drizzle-integration-tests-${uuid()}`, +// HostConfig: { +// AutoRemove: true, +// PortBindings: { +// '3306/tcp': [{ HostPort: `${port}` }], +// }, +// }, +// }); + +// await singlestoreContainer.start(); +// await new Promise((resolve) => setTimeout(resolve, 4000)); + +// return `singlestore://root:singlestore@localhost:${port}/`; +// } + +// beforeAll(async () => { +// const connectionString = process.env['SINGLESTORE_CONNECTION_STRING'] ?? 
(await createDockerDB()); +// client = await retry(async () => { +// client = await mysql.createConnection(connectionString); +// await client.connect(); +// return client; +// }, { +// retries: 20, +// factor: 1, +// minTimeout: 250, +// maxTimeout: 250, +// randomize: false, +// onRetry() { +// client?.end(); +// }, +// }); + +// await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); +// await client.changeUser({ database: 'drizzle' }); +// db = drizzle(client, { schema, logger: ENABLE_LOGGING }); +// }); + +// afterAll(async () => { +// await client?.end().catch(console.error); +// await singlestoreContainer?.stop().catch(console.error); +// }); + +// beforeEach(async (ctx) => { +// ctx.singlestoreDb = db; +// ctx.singlestoreClient = client; +// ctx.docker = globalDocker; +// ctx.singlestoreContainer = singlestoreContainer; + +// await ctx.singlestoreDb.execute(sql`drop table if exists \`users\``); +// await ctx.singlestoreDb.execute(sql`drop table if exists \`groups\``); +// await ctx.singlestoreDb.execute(sql`drop table if exists \`users_to_groups\``); +// await ctx.singlestoreDb.execute(sql`drop table if exists \`posts\``); +// await ctx.singlestoreDb.execute(sql`drop table if exists \`comments\``); +// await ctx.singlestoreDb.execute(sql`drop table if exists \`comment_likes\``); + +// await ctx.singlestoreDb.execute( +// sql` +// CREATE TABLE \`users\` ( +// \`id\` serial PRIMARY KEY NOT NULL, +// \`name\` text NOT NULL, +// \`verified\` boolean DEFAULT false NOT NULL, +// \`invited_by\` bigint +// ); +// `, +// ); +// await ctx.singlestoreDb.execute( +// sql` +// CREATE TABLE \`groups\` ( +// \`id\` serial PRIMARY KEY NOT NULL, +// \`name\` text NOT NULL, +// \`description\` text +// ); +// `, +// ); +// await ctx.singlestoreDb.execute( +// sql` +// CREATE TABLE \`users_to_groups\` ( +// \`id\` serial PRIMARY KEY NOT NULL, +// \`user_id\` bigint, +// \`group_id\` bigint +// ); +// `, +// ); +// await ctx.singlestoreDb.execute( +// sql` +// CREATE TABLE \`posts\` ( +// \`id\` serial PRIMARY KEY NOT NULL, +// \`content\` text NOT NULL, +// \`owner_id\` bigint, +// \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL +// ); +// `, +// ); +// await ctx.singlestoreDb.execute( +// sql` +// CREATE TABLE \`comments\` ( +// \`id\` serial PRIMARY KEY NOT NULL, +// \`content\` text NOT NULL, +// \`creator\` bigint, +// \`post_id\` bigint, +// \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL +// ); +// `, +// ); +// await ctx.singlestoreDb.execute( +// sql` +// CREATE TABLE \`comment_likes\` ( +// \`id\` serial PRIMARY KEY NOT NULL, +// \`creator\` bigint, +// \`comment_id\` bigint, +// \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL +// ); +// `, +// ); +// }); + +// /* +// [Find Many] One relation users+posts +// */ + +// test('[Find Many] Get users with posts', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// with: { +// posts: true, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// 
}[]; +// }[]>(); + +// usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(usersWithPosts.length).eq(3); +// expect(usersWithPosts[0]?.posts.length).eq(1); +// expect(usersWithPosts[1]?.posts.length).eq(1); +// expect(usersWithPosts[2]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts[2]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('[Find Many] Get users with posts + limit posts', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// with: { +// posts: { +// limit: 1, +// }, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(usersWithPosts.length).eq(3); +// expect(usersWithPosts[0]?.posts.length).eq(1); +// expect(usersWithPosts[1]?.posts.length).eq(1); +// expect(usersWithPosts[2]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts[2]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('[Find Many] Get users with posts + limit posts and users', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// limit: 2, +// with: { +// posts: { +// limit: 1, +// }, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(usersWithPosts.length).eq(2); +// expect(usersWithPosts[0]?.posts.length).eq(1); +// expect(usersWithPosts[1]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find Many] Get users with posts + custom fields', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// with: { +// posts: true, +// }, +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// lowerName: string; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(usersWithPosts.length).toEqual(3); +// expect(usersWithPosts[0]?.posts.length).toEqual(3); +// expect(usersWithPosts[1]?.posts.length).toEqual(2); +// expect(usersWithPosts[2]?.posts.length).toEqual(2); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// lowerName: 'dan', +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { +// id: 2, +// ownerId: 1, +// content: 'Post1.2', +// createdAt: usersWithPosts[0]?.posts[1]?.createdAt, +// }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], +// }); +// expect(usersWithPosts[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// lowerName: 'andrew', +// verified: false, +// invitedBy: null, +// posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { +// id: 5, +// ownerId: 2, +// content: 'Post2.1', +// createdAt: usersWithPosts[1]?.posts[1]?.createdAt, +// }], +// }); +// expect(usersWithPosts[2]).toEqual({ +// id: 3, +// name: 'Alex', +// lowerName: 'alex', +// verified: false, +// invitedBy: null, +// posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { +// id: 7, +// ownerId: 3, +// content: 'Post3.1', +// createdAt: usersWithPosts[2]?.posts[1]?.createdAt, +// }], +// }); +// }); + +// test.skip('[Find Many] Get users with posts + custom fields + limits', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// limit: 1, +// with: { +// posts: { +// limit: 1, +// }, +// }, +// extras: (usersTable, { sql }) => ({ +// lowerName: sql`lower(${usersTable.name})`.as('name_lower'), +// }), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// lowerName: string; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).toEqual(1); +// expect(usersWithPosts[0]?.posts.length).toEqual(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// lowerName: 'dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('[Find Many] Get users with posts + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: '1' }, +// { ownerId: 1, content: '2' }, +// { ownerId: 1, content: '3' }, +// { ownerId: 2, content: '4' }, +// { ownerId: 2, content: '5' }, +// { ownerId: 3, content: '6' }, +// { ownerId: 3, content: '7' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// with: { +// posts: { +// orderBy: 
(postsTable, { desc }) => [desc(postsTable.content)], +// }, +// }, +// orderBy: (usersTable, { desc }) => [desc(usersTable.id)], +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(3); +// expect(usersWithPosts[0]?.posts.length).eq(2); +// expect(usersWithPosts[1]?.posts.length).eq(2); +// expect(usersWithPosts[2]?.posts.length).eq(3); + +// expect(usersWithPosts[2]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { +// id: 2, +// ownerId: 1, +// content: '2', +// createdAt: usersWithPosts[2]?.posts[1]?.createdAt, +// }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ +// id: 5, +// ownerId: 2, +// content: '5', +// createdAt: usersWithPosts[1]?.posts[1]?.createdAt, +// }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts[0]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// posts: [{ +// id: 7, +// ownerId: 3, +// content: '7', +// createdAt: usersWithPosts[0]?.posts[1]?.createdAt, +// }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find Many] Get users with posts + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// where: (({ id }, { eq }) => eq(id, 1)), +// with: { +// posts: { +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find Many] Get users with posts + where + partial', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: { +// id: true, +// name: true, +// }, +// with: { +// posts: { +// columns: { +// id: true, +// 
content: true, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// posts: { +// id: number; +// content: string; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// posts: [{ id: 1, content: 'Post1' }], +// }); +// }); + +// test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: { +// id: true, +// name: true, +// }, +// with: { +// posts: { +// columns: { +// id: true, +// content: true, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// posts: { +// id: number; +// content: string; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// posts: [{ id: 1, content: 'Post1' }], +// }); +// }); + +// test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: { +// id: true, +// name: false, +// }, +// with: { +// posts: { +// columns: { +// id: true, +// content: false, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// posts: { +// id: number; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// posts: [{ id: 1 }], +// }); +// }); + +// test('[Find Many] Get users with posts + where + partial(false)', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: { +// name: false, +// }, +// with: { +// posts: { +// columns: { +// content: false, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// 
expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find Many] Get users with posts in transaction', async (t) => { +// const { singlestoreDb: db } = t; + +// let usersWithPosts: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[] = []; + +// await db.transaction(async (tx) => { +// await tx.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await tx.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// usersWithPosts = await tx.query.usersTable.findMany({ +// where: (({ id }, { eq }) => eq(id, 1)), +// with: { +// posts: { +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// }); +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { +// const { singlestoreDb: db } = t; + +// let usersWithPosts: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[] = []; + +// await expect(db.transaction(async (tx) => { +// await tx.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await tx.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// tx.rollback(); + +// usersWithPosts = await tx.query.usersTable.findMany({ +// where: (({ id }, { eq }) => eq(id, 1)), +// with: { +// posts: { +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// }); +// })).rejects.toThrowError(new TransactionRollbackError()); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(0); +// }); + +// // select only custom +// test('[Find Many] Get only custom fields', async () => { +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 
'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { id: 1, ownerId: 1, content: 'Post1' }, +// { id: 2, ownerId: 1, content: 'Post1.2' }, +// { id: 3, ownerId: 1, content: 'Post1.3' }, +// { id: 4, ownerId: 2, content: 'Post2' }, +// { id: 5, ownerId: 2, content: 'Post2.1' }, +// { id: 6, ownerId: 3, content: 'Post3' }, +// { id: 7, ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// extras: ({ content }) => ({ +// lowerName: sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// // Type Assertion +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// }[]>(); + +// // General Assertions +// expect(usersWithPosts).toHaveLength(3); + +// // Helper function to find user by lowerName +// const findUser = (lowerName: string) => usersWithPosts.find((user) => user.lowerName === lowerName); + +// // Assertions for each user +// const dan = findUser('dan'); +// const andrew = findUser('andrew'); +// const alex = findUser('alex'); + +// expect(dan).toBeDefined(); +// expect(andrew).toBeDefined(); +// expect(alex).toBeDefined(); + +// // Verify the number of posts for each user +// expect(dan?.posts).toHaveLength(3); +// expect(andrew?.posts).toHaveLength(2); +// expect(alex?.posts).toHaveLength(2); + +// // Define expected posts for each user +// const expectedDanPosts = ['post1', 'post1.2', 'post1.3']; +// const expectedAndrewPosts = ['post2', 'post2.1']; +// const expectedAlexPosts = ['post3', 'post3.1']; + +// // Helper function to extract lowerNames from posts +// const getPostLowerNames = (posts: { lowerName: string }[]) => posts.map((post) => post.lowerName); + +// // Assertions for Dan's posts +// expect(getPostLowerNames(dan!.posts)).toEqual(expect.arrayContaining(expectedDanPosts)); +// expect(getPostLowerNames(dan!.posts)).toHaveLength(expectedDanPosts.length); + +// // Assertions for Andrew's posts +// expect(getPostLowerNames(andrew!.posts)).toEqual(expect.arrayContaining(expectedAndrewPosts)); +// expect(getPostLowerNames(andrew!.posts)).toHaveLength(expectedAndrewPosts.length); + +// // Assertions for Alex's posts +// expect(getPostLowerNames(alex!.posts)).toEqual(expect.arrayContaining(expectedAlexPosts)); +// expect(getPostLowerNames(alex!.posts)).toHaveLength(expectedAlexPosts.length); +// }); + +// // select only custom with where clause (Order Agnostic) +// test('[Find Many] Get only custom fields + where', async (t) => { +// const { singlestoreDb: db } = t; + +// // Insert Users +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// // Insert Posts +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// // Query Users with Posts where users.id = 1 and posts.id >= 2 +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// where: gte(postsTable.id, 2), +// extras: ({ content }) => ({ +// lowerName: 
sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// where: eq(usersTable.id, 1), +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// // Type Assertion +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// }[]>(); + +// // General Assertions +// expect(usersWithPosts).toHaveLength(1); + +// // Since we expect only one user, we can extract it directly +// const danWithPosts = usersWithPosts[0]; + +// // Assert that the user exists and has the correct lowerName +// expect(danWithPosts).toBeDefined(); +// expect(danWithPosts?.lowerName).toBe('dan'); + +// // Assert that the user has the expected number of posts +// expect(danWithPosts?.posts).toHaveLength(2); + +// // Define the expected posts +// const expectedPosts = ['post1.2', 'post1.3']; + +// // Extract the lowerName of each post +// const actualPostLowerNames = danWithPosts?.posts.map((post) => post.lowerName); + +// // Assert that all expected posts are present, regardless of order +// for (const expectedPost of expectedPosts) { +// expect(actualPostLowerNames).toContain(expectedPost); +// } + +// // Additionally, ensure no unexpected posts are present +// expect(actualPostLowerNames).toHaveLength(expectedPosts.length); +// }); + +// test.skip('[Find Many] Get only custom fields + where + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// where: gte(postsTable.id, 2), +// limit: 1, +// extras: ({ content }) => ({ +// lowerName: sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// where: eq(usersTable.id, 1), +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).toEqual(1); +// expect(usersWithPosts[0]?.posts.length).toEqual(1); + +// expect(usersWithPosts).toContainEqual({ +// lowerName: 'dan', +// posts: [{ lowerName: 'post1.2' }], +// }); +// }); + +// test.skip('[Find Many] Get only custom fields + where + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findMany({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// where: gte(postsTable.id, 2), +// orderBy: [desc(postsTable.id)], +// extras: ({ content }) => ({ +// lowerName: 
sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// where: eq(usersTable.id, 1), +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).toEqual(1); +// expect(usersWithPosts[0]?.posts.length).toEqual(2); + +// expect(usersWithPosts).toContainEqual({ +// lowerName: 'dan', +// posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], +// }); +// }); + +// // select only custom find one (Order Agnostic) +// test('[Find One] Get only custom fields (Order Agnostic)', async () => { +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// // Query to find the first user without any specific order +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// extras: ({ content }) => ({ +// lowerName: sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// // Type Assertion +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// } | undefined +// >(); + +// // General Assertions +// expect(usersWithPosts).toBeDefined(); + +// // Since findFirst without orderBy can return any user, we'll verify the returned user and their posts +// if (usersWithPosts) { +// // Define expected users and their corresponding posts +// const expectedUsers: { [key: string]: string[] } = { +// dan: ['post1', 'post1.2', 'post1.3'], +// andrew: ['post2', 'post2.1'], +// alex: ['post3', 'post3.1'], +// }; + +// // Verify that the returned user is one of the expected users +// expect(Object.keys(expectedUsers)).toContain(usersWithPosts.lowerName); + +// // Get the expected posts for the returned user +// const expectedPosts = expectedUsers[usersWithPosts.lowerName] as string[]; + +// // Verify the number of posts +// expect(usersWithPosts.posts).toHaveLength(expectedPosts.length); + +// // Extract the lowerName of each post +// const actualPostLowerNames = usersWithPosts.posts.map((post) => post.lowerName); + +// // Assert that all expected posts are present, regardless of order +// for (const expectedPost of expectedPosts) { +// expect(actualPostLowerNames).toContain(expectedPost.toLowerCase()); +// } +// } +// }); + +// // select only custom find one with where clause (Order Agnostic) +// test('[Find One] Get only custom fields + where (Order Agnostic)', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' 
}, +// ]); + +// // Query to find the first user with id = 1 and posts with id >= 2 +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// where: gte(postsTable.id, 2), +// extras: ({ content }) => ({ +// lowerName: sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// where: eq(usersTable.id, 1), +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// // Type Assertion +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// } | undefined +// >(); + +// // General Assertions +// expect(usersWithPosts).toBeDefined(); + +// if (usersWithPosts) { +// // Assert that the returned user has the expected lowerName +// expect(usersWithPosts.lowerName).toBe('dan'); + +// // Assert that the user has exactly two posts +// expect(usersWithPosts.posts).toHaveLength(2); + +// // Define the expected posts +// const expectedPosts = ['post1.2', 'post1.3']; + +// // Extract the lowerName of each post +// const actualPostLowerNames = usersWithPosts.posts.map((post) => post.lowerName); + +// // Assert that all expected posts are present, regardless of order +// for (const expectedPost of expectedPosts) { +// expect(actualPostLowerNames).toContain(expectedPost.toLowerCase()); +// } + +// // Additionally, ensure no unexpected posts are present +// expect(actualPostLowerNames).toHaveLength(expectedPosts.length); +// } +// }); + +// test.skip('[Find One] Get only custom fields + where + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// where: gte(postsTable.id, 2), +// limit: 1, +// extras: ({ content }) => ({ +// lowerName: sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// where: eq(usersTable.id, 1), +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts?.posts.length).toEqual(1); + +// expect(usersWithPosts).toEqual({ +// lowerName: 'dan', +// posts: [{ lowerName: 'post1.2' }], +// }); +// }); + +// test.skip('[Find One] Get only custom fields + where + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: {}, +// with: { +// posts: { 
+// columns: {}, +// where: gte(postsTable.id, 2), +// orderBy: [desc(postsTable.id)], +// extras: ({ content }) => ({ +// lowerName: sql`lower(${content})`.as('content_lower'), +// }), +// }, +// }, +// where: eq(usersTable.id, 1), +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// lowerName: string; +// posts: { +// lowerName: string; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts?.posts.length).toEqual(2); + +// expect(usersWithPosts).toEqual({ +// lowerName: 'dan', +// posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], +// }); +// }); + +// // columns {} +// test('[Find Many] Get select {}', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await expect( +// async () => +// await db.query.usersTable.findMany({ +// columns: {}, +// }), +// ).rejects.toThrow(DrizzleError); +// }); + +// // columns {} +// test('[Find One] Get select {}', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await expect(async () => +// await db.query.usersTable.findFirst({ +// columns: {}, +// }) +// ).rejects.toThrow(DrizzleError); +// }); + +// // deep select {} +// test('[Find Many] Get deep select {}', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// await expect(async () => +// await db.query.usersTable.findMany({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// }, +// }, +// }) +// ).rejects.toThrow(DrizzleError); +// }); + +// // deep select {} +// test('[Find One] Get deep select {}', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// await expect(async () => +// await db.query.usersTable.findFirst({ +// columns: {}, +// with: { +// posts: { +// columns: {}, +// }, +// }, +// }) +// ).rejects.toThrow(DrizzleError); +// }); + +// /* +// Prepared statements for users+posts +// */ +// test.skip('[Find Many] Get users with posts + prepared limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const prepared = db.query.usersTable.findMany({ +// with: { +// posts: { +// limit: placeholder('limit'), +// }, +// }, +// }).prepare(); + +// const usersWithPosts = await prepared.execute({ limit: 1 }); 
+ +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(3); +// expect(usersWithPosts[0]?.posts.length).eq(1); +// expect(usersWithPosts[1]?.posts.length).eq(1); +// expect(usersWithPosts[2]?.posts.length).eq(1); + +// expect(usersWithPosts).toContainEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('[Find Many] Get users with posts + prepared limit + offset', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const prepared = db.query.usersTable.findMany({ +// limit: placeholder('uLimit'), +// offset: placeholder('uOffset'), +// with: { +// posts: { +// limit: placeholder('pLimit'), +// }, +// }, +// }).prepare(); + +// const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(2); +// expect(usersWithPosts[0]?.posts.length).eq(1); +// expect(usersWithPosts[1]?.posts.length).eq(1); + +// expect(usersWithPosts).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// expect(usersWithPosts).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find Many] Get users with posts + prepared where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const prepared = db.query.usersTable.findMany({ +// where: (({ id }, { eq }) => eq(id, placeholder('id'))), +// with: { +// posts: { +// where: (({ id }, { eq }) => 
eq(id, 1)), +// }, +// }, +// }).prepare(); + +// const usersWithPosts = await prepared.execute({ id: 1 }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const prepared = db.query.usersTable.findMany({ +// limit: placeholder('uLimit'), +// offset: placeholder('uOffset'), +// where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), +// with: { +// posts: { +// where: (({ id }, { eq }) => eq(id, placeholder('pid'))), +// limit: placeholder('pLimit'), +// }, +// }, +// }).prepare(); + +// const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// }[]>(); + +// expect(usersWithPosts.length).eq(1); +// expect(usersWithPosts[0]?.posts.length).eq(1); + +// expect(usersWithPosts).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], +// }); +// }); + +// /* +// [Find One] One relation users+posts +// */ + +// test.only('[Find One] Get users with posts', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// with: { +// posts: true, +// }, +// }); + +// // Type Assertion +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// // General Assertions +// expect(usersWithPosts).toBeDefined(); + +// if (usersWithPosts) { +// const { id, name, posts } = usersWithPosts; + +// // Verify that the user is one of the inserted users +// const validUsers: { [key: number]: string } = { +// 1: 'dan', +// 2: 'andrew', +// 3: 'alex', +// }; +// 
expect(validUsers[id]).toBe(name.toLowerCase()); + +// // Assert that the user has exactly one post +// expect(posts).toHaveLength(1); + +// const post = posts[0]; + +// // Verify that the post belongs to the user +// expect(post?.ownerId).toBe(id); + +// // Verify that the post content matches the user +// const expectedPostContent = `Post${id}`; +// expect(post?.content.toLowerCase()).toBe(expectedPostContent.toLowerCase()); + +// // Optionally, verify the presence of `createdAt` +// expect(post?.createdAt).toBeInstanceOf(Date); +// } +// }); + +// test.skip('[Find One] Get users with posts + limit posts', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// with: { +// posts: { +// limit: 1, +// }, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('[Find One] Get users with posts no results found', async (t) => { +// const { singlestoreDb: db } = t; + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// with: { +// posts: { +// limit: 1, +// }, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts).toBeUndefined(); +// }); + +// test.skip('[Find One] Get users with posts + limit posts and users', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// with: { +// posts: { +// limit: 1, +// }, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// 
posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find One] Get users with posts + custom fields', async () => { +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// with: { +// posts: true, +// }, +// extras: ({ name }) => ({ +// lowerName: sql`lower(${name})`.as('name_lower'), +// }), +// }); + +// // Type Assertion +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// lowerName: string; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// // General Assertions +// expect(usersWithPosts).toBeDefined(); + +// if (usersWithPosts) { +// const { id, lowerName, posts } = usersWithPosts; + +// // Define valid users and their expected lower names +// const validUsers: { [key: number]: string } = { +// 1: 'dan', +// 2: 'andrew', +// 3: 'alex', +// }; + +// // Verify that the returned user's lowerName matches the expected value +// expect(validUsers[id]).toBe(lowerName); + +// // Define the expected posts based on the user ID +// const expectedPostsByUser: Record = { +// 1: ['post1', 'post1.2', 'post1.3'], +// 2: ['post2', 'post2.1'], +// 3: ['post3', 'post3.1'], +// }; + +// // Get the expected posts for the returned user +// const expectedPosts = expectedPostsByUser[id] || []; + +// // Extract the lowerName of each post +// const actualPostContents = posts.map((post) => post.content.toLowerCase()); + +// // Assert that all expected posts are present, regardless of order +// for (const expectedPost of expectedPosts) { +// expect(actualPostContents).toContain(expectedPost.toLowerCase()); +// } + +// // Optionally, ensure that no unexpected posts are present +// expect(actualPostContents).toHaveLength(expectedPosts.length); +// } +// }); + +// test.skip('[Find One] Get users with posts + custom fields + limits', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.2' }, +// { ownerId: 1, content: 'Post1.3' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// with: { +// posts: { +// limit: 1, +// }, +// }, +// extras: (usersTable, { sql }) => ({ +// lowerName: sql`lower(${usersTable.name})`.as('name_lower'), +// }), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// lowerName: string; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// 
expect(usersWithPosts!.posts.length).toEqual(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// name: 'Dan', +// lowerName: 'dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('[Find One] Get users with posts + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: '1' }, +// { ownerId: 1, content: '2' }, +// { ownerId: 1, content: '3' }, +// { ownerId: 2, content: '4' }, +// { ownerId: 2, content: '5' }, +// { ownerId: 3, content: '6' }, +// { ownerId: 3, content: '7' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// with: { +// posts: { +// orderBy: (postsTable, { desc }) => [desc(postsTable.content)], +// }, +// }, +// orderBy: (usersTable, { desc }) => [desc(usersTable.id)], +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(2); + +// expect(usersWithPosts).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// posts: [{ +// id: 7, +// ownerId: 3, +// content: '7', +// createdAt: usersWithPosts?.posts[1]?.createdAt, +// }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find One] Get users with posts + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// where: (({ id }, { eq }) => eq(id, 1)), +// with: { +// posts: { +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], +// }); +// }); + +// test('[Find One] Get users with posts + where + partial', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: { +// id: true, +// name: true, +// }, +// with: { +// posts: { +// 
columns: { +// id: true, +// content: true, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// posts: { +// id: number; +// content: string; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// name: 'Dan', +// posts: [{ id: 1, content: 'Post1' }], +// }); +// }); + +// test.skip('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: { +// id: true, +// name: true, +// }, +// with: { +// posts: { +// columns: { +// id: true, +// content: true, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// name: string; +// posts: { +// id: number; +// content: string; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// name: 'Dan', +// posts: [{ id: 1, content: 'Post1' }], +// }); +// }); + +// test.skip('[Find One] Get users with posts + where + partial(true + false)', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: { +// id: true, +// name: false, +// }, +// with: { +// posts: { +// columns: { +// id: true, +// content: false, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// posts: { +// id: number; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// posts: [{ id: 1 }], +// }); +// }); + +// test.skip('[Find One] Get users with posts + where + partial(false)', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const usersWithPosts = await db.query.usersTable.findFirst({ +// columns: { +// name: false, +// }, +// with: { +// posts: { +// columns: { +// content: false, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }, +// }, +// where: (({ id }, { eq }) => eq(id, 1)), +// }); + +// 
expectTypeOf(usersWithPosts).toEqualTypeOf< +// { +// id: number; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// ownerId: number | null; +// createdAt: Date; +// }[]; +// } | undefined +// >(); + +// expect(usersWithPosts!.posts.length).eq(1); + +// expect(usersWithPosts).toEqual({ +// id: 1, +// verified: false, +// invitedBy: null, +// posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], +// }); +// }); + +// /* +// One relation users+users. Self referencing +// */ + +// test.skip('Get user with invitee', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// with: { +// invitee: true, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(usersWithInvitee.length).eq(4); +// expect(usersWithInvitee[0]?.invitee).toBeNull(); +// expect(usersWithInvitee[1]?.invitee).toBeNull(); +// expect(usersWithInvitee[2]?.invitee).not.toBeNull(); +// expect(usersWithInvitee[3]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[2]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// }); +// expect(usersWithInvitee[3]).toEqual({ +// id: 4, +// name: 'John', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, +// }); +// }); + +// test.skip('Get user + limit with invitee', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew', invitedBy: 1 }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// with: { +// invitee: true, +// }, +// limit: 2, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// usersWithInvitee.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(usersWithInvitee.length).eq(2); +// expect(usersWithInvitee[0]?.invitee).toBeNull(); +// expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// }); +// }); + +// test.skip('Get user with invitee and custom fields', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), +// with: { +// invitee: { +// extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), +// }, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// lower: string; +// invitedBy: number | null; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// lower: string; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(usersWithInvitee.length).eq(4); +// expect(usersWithInvitee[0]?.invitee).toBeNull(); +// expect(usersWithInvitee[1]?.invitee).toBeNull(); +// expect(usersWithInvitee[2]?.invitee).not.toBeNull(); +// expect(usersWithInvitee[3]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee[0]).toEqual({ +// id: 1, +// name: 'Dan', +// lower: 'dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// lower: 'andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[2]).toEqual({ +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, +// }); +// expect(usersWithInvitee[3]).toEqual({ +// id: 4, +// name: 'John', +// lower: 'john', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, +// }); +// }); + +// test.skip('Get user with invitee and custom fields + limits', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), +// limit: 3, +// with: { +// invitee: { +// extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), +// }, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// lower: string; +// invitedBy: number | null; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// lower: string; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// usersWithInvitee.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(usersWithInvitee.length).eq(3); +// expect(usersWithInvitee[0]?.invitee).toBeNull(); +// expect(usersWithInvitee[1]?.invitee).toBeNull(); +// expect(usersWithInvitee[2]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee[0]).toEqual({ +// id: 1, +// name: 'Dan', +// lower: 'dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// lower: 'andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[2]).toEqual({ +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, +// }); +// }); + +// test.skip('Get user with invitee + order by', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// orderBy: (users, { desc }) => [desc(users.id)], +// with: { +// invitee: true, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// expect(usersWithInvitee.length).eq(4); +// expect(usersWithInvitee[3]?.invitee).toBeNull(); +// expect(usersWithInvitee[2]?.invitee).toBeNull(); +// expect(usersWithInvitee[1]?.invitee).not.toBeNull(); +// expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee[3]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[2]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// }); +// expect(usersWithInvitee[1]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// }); +// expect(usersWithInvitee[0]).toEqual({ +// id: 4, +// name: 'John', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, +// }); +// }); + +// test.skip('Get user with invitee + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), +// with: { +// invitee: true, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// expect(usersWithInvitee.length).eq(2); +// expect(usersWithInvitee[0]?.invitee).not.toBeNull(); +// expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: 
null }, +// }); +// expect(usersWithInvitee).toContainEqual({ +// id: 4, +// name: 'John', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, +// }); +// }); + +// test.skip('Get user with invitee + where + partial', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), +// columns: { +// id: true, +// name: true, +// }, +// with: { +// invitee: { +// columns: { +// id: true, +// name: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// invitee: { +// id: number; +// name: string; +// } | null; +// }[] +// >(); + +// expect(usersWithInvitee.length).eq(2); +// expect(usersWithInvitee[0]?.invitee).not.toBeNull(); +// expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee).toContainEqual({ +// id: 3, +// name: 'Alex', +// invitee: { id: 1, name: 'Dan' }, +// }); +// expect(usersWithInvitee).toContainEqual({ +// id: 4, +// name: 'John', +// invitee: { id: 2, name: 'Andrew' }, +// }); +// }); + +// test.skip('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), +// columns: { +// name: true, +// }, +// with: { +// invitee: { +// columns: { +// id: true, +// name: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// name: string; +// invitee: { +// id: number; +// name: string; +// } | null; +// }[] +// >(); + +// expect(usersWithInvitee.length).eq(2); +// expect(usersWithInvitee[0]?.invitee).not.toBeNull(); +// expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee).toContainEqual({ +// name: 'Alex', +// invitee: { id: 1, name: 'Dan' }, +// }); +// expect(usersWithInvitee).toContainEqual({ +// name: 'John', +// invitee: { id: 2, name: 'Andrew' }, +// }); +// }); + +// test.skip('Get user with invitee + where + partial(true+false)', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), +// columns: { +// id: true, +// name: true, +// verified: false, +// }, +// with: { +// invitee: { +// columns: { +// id: true, +// name: true, +// verified: false, +// }, +// }, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// invitee: { +// id: number; +// name: string; +// } | null; +// }[] +// >(); + +// expect(usersWithInvitee.length).eq(2); +// expect(usersWithInvitee[0]?.invitee).not.toBeNull(); +// 
expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee).toContainEqual({ +// id: 3, +// name: 'Alex', +// invitee: { id: 1, name: 'Dan' }, +// }); +// expect(usersWithInvitee).toContainEqual({ +// id: 4, +// name: 'John', +// invitee: { id: 2, name: 'Andrew' }, +// }); +// }); + +// test.skip('Get user with invitee + where + partial(false)', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// const usersWithInvitee = await db.query.usersTable.findMany({ +// where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), +// columns: { +// verified: false, +// }, +// with: { +// invitee: { +// columns: { +// name: false, +// }, +// }, +// }, +// }); + +// expectTypeOf(usersWithInvitee).toEqualTypeOf< +// { +// id: number; +// name: string; +// invitedBy: number | null; +// invitee: { +// id: number; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// expect(usersWithInvitee.length).eq(2); +// expect(usersWithInvitee[0]?.invitee).not.toBeNull(); +// expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + +// expect(usersWithInvitee).toContainEqual({ +// id: 3, +// name: 'Alex', +// invitedBy: 1, +// invitee: { id: 1, verified: false, invitedBy: null }, +// }); +// expect(usersWithInvitee).toContainEqual({ +// id: 4, +// name: 'John', +// invitedBy: 2, +// invitee: { id: 2, verified: false, invitedBy: null }, +// }); +// }); + +// /* +// Two first-level relations users+users and users+posts +// */ + +// test.skip('Get user with invitee and posts', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// with: { +// invitee: true, +// posts: true, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).eq(4); + +// expect(response[0]?.invitee).toBeNull(); +// expect(response[1]?.invitee).toBeNull(); +// expect(response[2]?.invitee).not.toBeNull(); +// expect(response[3]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(1); +// expect(response[1]?.posts.length).eq(1); +// expect(response[2]?.posts.length).eq(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 4, +// name: 'John', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, +// posts: [], +// }); +// }); + +// test.skip('Get user with invitee and posts + limit posts and users', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// limit: 3, +// with: { +// invitee: true, +// posts: { +// limit: 1, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).eq(3); + +// expect(response[0]?.invitee).toBeNull(); +// expect(response[1]?.invitee).toBeNull(); +// expect(response[2]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(1); +// expect(response[1]?.posts.length).eq(1); +// expect(response[2]?.posts.length).eq(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('Get user with invitee and posts + limits + custom fields in each', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// limit: 3, +// extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), +// with: { +// invitee: { +// extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), +// }, +// posts: { +// limit: 1, +// extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// lower: string; +// invitedBy: number | null; +// posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// lower: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).eq(3); + +// expect(response[0]?.invitee).toBeNull(); +// expect(response[1]?.invitee).toBeNull(); +// expect(response[2]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(1); +// expect(response[1]?.posts.length).eq(1); +// expect(response[2]?.posts.length).eq(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// lower: 'dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// lower: 'andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, +// posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('Get user with invitee and posts + custom fields in each', async () => { +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), +// with: { +// invitee: { +// extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), +// }, +// posts: { +// extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// lower: string; +// invitedBy: number | null; +// posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// lower: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); +// response[2]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).eq(4); + +// expect(response[0]?.invitee).toBeNull(); +// expect(response[1]?.invitee).toBeNull(); +// expect(response[2]?.invitee).not.toBeNull(); +// expect(response[3]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(2); +// expect(response[1]?.posts.length).eq(2); +// expect(response[2]?.posts.length).eq(2); +// expect(response[3]?.posts.length).eq(0); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// lower: 'dan', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { +// id: 2, +// ownerId: 1, +// content: 'Post1.1', +// lower: 'post1.1', +// createdAt: response[0]?.posts[1]?.createdAt, +// }], +// }); +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// lower: 'andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { +// id: 4, +// ownerId: 2, +// content: 'Post2.1', +// lower: 'post2.1', +// createdAt: response[1]?.posts[1]?.createdAt, +// }], +// }); +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, +// posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { +// id: 6, +// ownerId: 3, +// content: 'Post3.1', +// lower: 'post3.1', +// createdAt: response[2]?.posts[1]?.createdAt, +// }], +// }); +// expect(response).toContainEqual({ +// id: 4, +// name: 'John', +// lower: 'john', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, +// posts: [], +// }); +// }); + +// test.skip('Get user with invitee and posts + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// orderBy: (users, { desc }) => [desc(users.id)], +// with: { +// invitee: true, +// posts: { +// orderBy: (posts, { desc }) => [desc(posts.id)], +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// expect(response.length).eq(4); + +// expect(response[3]?.invitee).toBeNull(); +// expect(response[2]?.invitee).toBeNull(); +// expect(response[1]?.invitee).not.toBeNull(); +// expect(response[0]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(0); +// expect(response[1]?.posts.length).eq(1); +// expect(response[2]?.posts.length).eq(2); +// expect(response[3]?.posts.length).eq(2); + +// expect(response[3]).toEqual({ +// id: 1, +// name: 'Dan', +// 
verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { +// id: 1, +// ownerId: 1, +// content: 'Post1', +// createdAt: response[3]?.posts[1]?.createdAt, +// }], +// }); +// expect(response[2]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { +// id: 3, +// ownerId: 2, +// content: 'Post2', +// createdAt: response[2]?.posts[1]?.createdAt, +// }], +// }); +// expect(response[1]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// posts: [{ +// id: 5, +// ownerId: 3, +// content: 'Post3', +// createdAt: response[3]?.posts[1]?.createdAt, +// }], +// }); +// expect(response[0]).toEqual({ +// id: 4, +// name: 'John', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, +// posts: [], +// }); +// }); + +// test.skip('Get user with invitee and posts + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), +// with: { +// invitee: true, +// posts: { +// where: (posts, { eq }) => (eq(posts.ownerId, 2)), +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).eq(2); + +// expect(response[0]?.invitee).toBeNull(); +// expect(response[1]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(1); +// expect(response[1]?.posts.length).eq(0); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// invitee: null, +// posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], +// }); +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// posts: [], +// }); +// }); + +// test.skip('Get user with invitee and posts + limit posts and users + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// { ownerId: 3, content: 'Post3.1' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), +// limit: 1, +// with: { +// invitee: true, +// posts: { +// where: (posts, { eq }) => (eq(posts.ownerId, 3)), +// limit: 1, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// expect(response.length).eq(1); + +// expect(response[0]?.invitee).not.toBeNull(); +// expect(response[0]?.posts.length).eq(1); + +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], +// }); +// }); + +// test.skip('Get user with invitee and posts + orderBy + where + custom', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// orderBy: [desc(usersTable.id)], +// where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), +// extras: { +// lower: sql`lower(${usersTable.name})`.as('lower_name'), +// }, +// with: { +// invitee: true, +// posts: { +// where: eq(postsTable.ownerId, 3), +// orderBy: [desc(postsTable.id)], +// extras: { +// lower: sql`lower(${postsTable.content})`.as('lower_name'), +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// lower: string; +// 
posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; +// invitee: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[] +// >(); + +// expect(response.length).eq(2); + +// expect(response[1]?.invitee).not.toBeNull(); +// expect(response[0]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(0); +// expect(response[1]?.posts.length).eq(1); + +// expect(response[1]).toEqual({ +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: 1, +// invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, +// posts: [{ +// id: 5, +// ownerId: 3, +// content: 'Post3', +// lower: 'post3', +// createdAt: response[1]?.posts[0]?.createdAt, +// }], +// }); +// expect(response[0]).toEqual({ +// id: 4, +// name: 'John', +// lower: 'john', +// verified: false, +// invitedBy: 2, +// invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, +// posts: [], +// }); +// }); + +// test.skip('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex', invitedBy: 1 }, +// { id: 4, name: 'John', invitedBy: 2 }, +// ]); + +// await db.insert(postsTable).values([ +// { ownerId: 1, content: 'Post1' }, +// { ownerId: 1, content: 'Post1.1' }, +// { ownerId: 2, content: 'Post2' }, +// { ownerId: 2, content: 'Post2.1' }, +// { ownerId: 3, content: 'Post3' }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// orderBy: [desc(usersTable.id)], +// where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), +// extras: { +// lower: sql`lower(${usersTable.name})`.as('lower_name'), +// }, +// columns: { +// id: true, +// name: true, +// }, +// with: { +// invitee: { +// columns: { +// id: true, +// name: true, +// }, +// extras: { +// lower: sql`lower(${usersTable.name})`.as('lower_name'), +// }, +// }, +// posts: { +// columns: { +// id: true, +// content: true, +// }, +// where: eq(postsTable.ownerId, 3), +// orderBy: [desc(postsTable.id)], +// extras: { +// lower: sql`lower(${postsTable.content})`.as('lower_name'), +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// lower: string; +// posts: { id: number; lower: string; content: string }[]; +// invitee: { +// id: number; +// name: string; +// lower: string; +// } | null; +// }[] +// >(); + +// expect(response.length).eq(2); + +// expect(response[1]?.invitee).not.toBeNull(); +// expect(response[0]?.invitee).not.toBeNull(); + +// expect(response[0]?.posts.length).eq(0); +// expect(response[1]?.posts.length).eq(1); + +// expect(response[1]).toEqual({ +// id: 3, +// name: 'Alex', +// lower: 'alex', +// invitee: { id: 1, name: 'Dan', lower: 'dan' }, +// posts: [{ +// id: 5, +// content: 'Post3', +// lower: 'post3', +// }], +// }); +// expect(response[0]).toEqual({ +// id: 4, +// name: 'John', +// lower: 'john', +// invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, +// posts: [], +// }); +// }); + +// /* +// One two-level relation users+posts+comments +// */ + +// test.skip('Get user with posts and posts with comments', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ 
+// { id: 1, ownerId: 1, content: 'Post1' }, +// { id: 2, ownerId: 2, content: 'Post2' }, +// { id: 3, ownerId: 3, content: 'Post3' }, +// ]); + +// await db.insert(commentsTable).values([ +// { postId: 1, content: 'Comment1', creator: 2 }, +// { postId: 2, content: 'Comment2', creator: 2 }, +// { postId: 3, content: 'Comment3', creator: 3 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// with: { +// posts: { +// with: { +// comments: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// comments: { +// id: number; +// content: string; +// createdAt: Date; +// creator: number | null; +// postId: number | null; +// }[]; +// }[]; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(response.length).eq(3); +// expect(response[0]?.posts.length).eq(1); +// expect(response[1]?.posts.length).eq(1); +// expect(response[2]?.posts.length).eq(1); + +// expect(response[0]?.posts[0]?.comments.length).eq(1); +// expect(response[1]?.posts[0]?.comments.length).eq(1); +// expect(response[2]?.posts[0]?.comments.length).eq(1); + +// expect(response[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ +// id: 1, +// ownerId: 1, +// content: 'Post1', +// createdAt: response[0]?.posts[0]?.createdAt, +// comments: [ +// { +// id: 1, +// content: 'Comment1', +// creator: 2, +// postId: 1, +// createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, +// }, +// ], +// }], +// }); +// expect(response[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ +// id: 2, +// ownerId: 2, +// content: 'Post2', +// createdAt: response[1]?.posts[0]?.createdAt, +// comments: [ +// { +// id: 2, +// content: 'Comment2', +// creator: 2, +// postId: 2, +// createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, +// }, +// ], +// }], +// }); +// // expect(response[2]).toEqual({ +// // id: 3, +// // name: 'Alex', +// // verified: false, +// // invitedBy: null, +// // posts: [{ +// // id: 3, +// // ownerId: 3, +// // content: 'Post3', +// // createdAt: response[2]?.posts[0]?.createdAt, +// // comments: [ +// // { +// // id: , +// // content: 'Comment3', +// // creator: 3, +// // postId: 3, +// // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, +// // }, +// // ], +// // }], +// // }); +// }); + +// // Get user with limit posts and limit comments + +// // Get user with custom field + post + comment with custom field + +// // Get user with limit + posts orderBy + comment orderBy + +// // Get user with where + posts where + comment where + +// // Get user with where + posts partial where + comment where + +// // Get user with where + posts partial where + comment partial(false) where + +// // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where + +// // Get user with where + posts partial where + comment where. Didn't select field from where in posts + +// // Get user with where + posts partial where + comment where. 
Didn't select field from where for all + +// // Get with limit+offset in each + +// /* +// One two-level + One first-level relation users+posts+comments and users+users +// */ + +// /* +// One three-level relation users+posts+comments+comment_owner +// */ + +// test.skip('Get user with posts and posts with comments and comments with owner', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { id: 1, ownerId: 1, content: 'Post1' }, +// { id: 2, ownerId: 2, content: 'Post2' }, +// { id: 3, ownerId: 3, content: 'Post3' }, +// ]); + +// await db.insert(commentsTable).values([ +// { postId: 1, content: 'Comment1', creator: 2 }, +// { postId: 2, content: 'Comment2', creator: 2 }, +// { postId: 3, content: 'Comment3', creator: 3 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// with: { +// posts: { +// with: { +// comments: { +// with: { +// author: true, +// }, +// }, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// posts: { +// id: number; +// content: string; +// ownerId: number | null; +// createdAt: Date; +// comments: { +// id: number; +// content: string; +// createdAt: Date; +// creator: number | null; +// postId: number | null; +// author: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// } | null; +// }[]; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(response.length).eq(3); +// expect(response[0]?.posts.length).eq(1); +// expect(response[1]?.posts.length).eq(1); +// expect(response[2]?.posts.length).eq(1); + +// expect(response[0]?.posts[0]?.comments.length).eq(1); +// expect(response[1]?.posts[0]?.comments.length).eq(1); +// expect(response[2]?.posts[0]?.comments.length).eq(1); + +// expect(response[0]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// posts: [{ +// id: 1, +// ownerId: 1, +// content: 'Post1', +// createdAt: response[0]?.posts[0]?.createdAt, +// comments: [ +// { +// id: 1, +// content: 'Comment1', +// creator: 2, +// author: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// postId: 1, +// createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, +// }, +// ], +// }], +// }); +// expect(response[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// posts: [{ +// id: 2, +// ownerId: 2, +// content: 'Post2', +// createdAt: response[1]?.posts[0]?.createdAt, +// comments: [ +// { +// id: 2, +// content: 'Comment2', +// creator: 2, +// author: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// postId: 2, +// createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, +// }, +// ], +// }], +// }); +// }); + +// test.skip('Get user with posts and posts with comments and comments with owner where exists', async () => { +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(postsTable).values([ +// { id: 1, ownerId: 1, content: 'Post1' }, +// { id: 2, ownerId: 2, content: 'Post2' }, +// { id: 3, ownerId: 3, content: 'Post3' }, +// ]); + +// await db.insert(commentsTable).values([ +// { postId: 1, content: 'Comment1', creator: 2 }, 
+// { postId: 2, content: 'Comment2', creator: 2 },
+// { postId: 3, content: 'Comment3', creator: 3 },
+// ]);
+
+// const response = await db.query.usersTable.findMany({
+// with: {
+// posts: {
+// with: {
+// comments: {
+// with: {
+// author: true,
+// },
+// },
+// },
+// },
+// },
+// where: (table, { exists, eq }) => exists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))),
+// });
+
+// expectTypeOf(response).toEqualTypeOf<{
+// id: number;
+// name: string;
+// verified: boolean;
+// invitedBy: number | null;
+// posts: {
+// id: number;
+// content: string;
+// ownerId: number | null;
+// createdAt: Date;
+// comments: {
+// id: number;
+// content: string;
+// createdAt: Date;
+// creator: number | null;
+// postId: number | null;
+// author: {
+// id: number;
+// name: string;
+// verified: boolean;
+// invitedBy: number | null;
+// } | null;
+// }[];
+// }[];
+// }[]>();
+
+// expect(response.length).eq(1);
+// expect(response[0]?.posts.length).eq(1);
+
+// expect(response[0]?.posts[0]?.comments.length).eq(1);
+
+// expect(response[0]).toEqual({
+// id: 1,
+// name: 'Dan',
+// verified: false,
+// invitedBy: null,
+// posts: [{
+// id: 1,
+// ownerId: 1,
+// content: 'Post1',
+// createdAt: response[0]?.posts[0]?.createdAt,
+// comments: [
+// {
+// id: 1,
+// content: 'Comment1',
+// creator: 2,
+// author: {
+// id: 2,
+// name: 'Andrew',
+// verified: false,
+// invitedBy: null,
+// },
+// postId: 1,
+// createdAt: response[0]?.posts[0]?.comments[0]?.createdAt,
+// },
+// ],
+// }],
+// });
+// });
+
+// /*
+// One three-level relation + 1 first-level relation
+// 1. users+posts+comments+comment_owner
+// 2. users+users
+// */
+
+// /*
+// One four-level relation users+posts+comments+comment_likes
+// */
+
+// /*
+// [Find Many] Many-to-many cases
+
+// Users+users_to_groups+groups
+// */
+
+// test.skip('[Find Many] Get users with groups', async (t) => {
+// const { singlestoreDb: db } = t;
+
+// await db.insert(usersTable).values([
+// { id: 1, name: 'Dan' },
+// { id: 2, name: 'Andrew' },
+// { id: 3, name: 'Alex' },
+// ]);
+
+// await db.insert(groupsTable).values([
+// { id: 1, name: 'Group1' },
+// { id: 2, name: 'Group2' },
+// { id: 3, name: 'Group3' },
+// ]);
+
+// await db.insert(usersToGroupsTable).values([
+// { userId: 1, groupId: 1 },
+// { userId: 2, groupId: 2 },
+// { userId: 3, groupId: 3 },
+// { userId: 3, groupId: 2 },
+// ]);
+
+// const response = await db.query.usersTable.findMany({
+// with: {
+// usersToGroups: {
+// columns: {},
+// with: {
+// group: true,
+// },
+// },
+// },
+// });
+
+// expectTypeOf(response).toEqualTypeOf<{
+// id: number;
+// name: string;
+// verified: boolean;
+// invitedBy: number | null;
+// usersToGroups: {
+// group: {
+// id: number;
+// name: string;
+// description: string | null;
+// };
+// }[];
+// }[]>();
+
+// response.sort((a, b) => (a.id > b.id) ?
1 : -1); + +// expect(response.length).toEqual(3); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(1); +// expect(response[2]?.usersToGroups.length).toEqual(2); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// description: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 3, +// name: 'Group3', +// description: null, +// }, +// }, { +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get groups with users', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findMany({ +// with: { +// usersToGroups: { +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).toEqual(3); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(2); +// expect(response[2]?.usersToGroups.length).toEqual(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Group1', +// description: null, +// usersToGroups: [{ +// user: { +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// }, { +// user: { +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 3, +// name: 'Group3', +// description: null, +// usersToGroups: [{ +// user: { +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get users with groups + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 2, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// limit: 2, +// with: { +// usersToGroups: { +// limit: 1, +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).toEqual(2); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// description: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get groups with users + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findMany({ +// limit: 2, +// with: { +// usersToGroups: { +// limit: 1, +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(response.length).toEqual(2); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Group1', +// description: null, +// usersToGroups: [{ +// user: { +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get users with groups + limit + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 2, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// limit: 1, +// where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), +// with: { +// usersToGroups: { +// where: eq(usersToGroupsTable.groupId, 1), +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).toEqual(1); + +// expect(response[0]?.usersToGroups.length).toEqual(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get groups with users + limit + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findMany({ +// limit: 1, +// where: gt(groupsTable.id, 1), +// with: { +// usersToGroups: { +// where: eq(usersToGroupsTable.userId, 2), +// limit: 1, +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(response.length).toEqual(1); + +// expect(response[0]?.usersToGroups.length).toEqual(1); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get users with groups + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 2, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), +// with: { +// usersToGroups: { +// where: eq(usersToGroupsTable.groupId, 2), +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + +// expect(response.length).toEqual(2); + +// expect(response[0]?.usersToGroups.length).toEqual(0); +// expect(response[1]?.usersToGroups.length).toEqual(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [], +// }); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get groups with users + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findMany({ +// where: gt(groupsTable.id, 1), +// with: { +// usersToGroups: { +// where: eq(usersToGroupsTable.userId, 2), +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// }[]>(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(response.length).toEqual(2); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(0); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 3, +// name: 'Group3', +// description: null, +// usersToGroups: [], +// }); +// }); + +// test.skip('[Find Many] Get users with groups + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// orderBy: (users, { desc }) => [desc(users.id)], +// with: { +// usersToGroups: { +// orderBy: [desc(usersToGroupsTable.groupId)], +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// }[]>(); + +// expect(response.length).toEqual(3); + +// expect(response[0]?.usersToGroups.length).toEqual(2); +// expect(response[1]?.usersToGroups.length).toEqual(1); +// expect(response[2]?.usersToGroups.length).toEqual(1); + +// 
expect(response[2]).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// description: null, +// }, +// }], +// }); + +// expect(response[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); + +// expect(response[0]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 3, +// name: 'Group3', +// description: null, +// }, +// }, { +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get groups with users + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findMany({ +// orderBy: [desc(groupsTable.id)], +// with: { +// usersToGroups: { +// orderBy: (utg, { desc }) => [desc(utg.userId)], +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// }[]>(); + +// expect(response.length).toEqual(3); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(2); +// expect(response[2]?.usersToGroups.length).toEqual(1); + +// expect(response[2]).toEqual({ +// id: 1, +// name: 'Group1', +// description: null, +// usersToGroups: [{ +// user: { +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response[1]).toEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// }, +// }, { +// user: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response[0]).toEqual({ +// id: 3, +// name: 'Group3', +// description: null, +// usersToGroups: [{ +// user: { +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find Many] Get users with groups + orderBy + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// orderBy: (users, { desc }) => [desc(users.id)], +// limit: 2, +// with: { +// 
usersToGroups: { +// limit: 1, +// orderBy: [desc(usersToGroupsTable.groupId)], +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf<{ +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// }[]>(); + +// expect(response.length).toEqual(2); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(1); + +// expect(response[1]).toEqual({ +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); + +// expect(response[0]).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 3, +// name: 'Group3', +// description: null, +// }, +// }], +// }); +// }); + +// /* +// [Find One] Many-to-many cases + +// Users+users_to_groups+groups +// */ + +// test.skip('[Find One] Get users with groups', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findFirst({ +// with: { +// usersToGroups: { +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get groups with users', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findFirst({ +// with: { +// usersToGroups: { +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 1, +// name: 'Group1', +// 
description: null, +// usersToGroups: [{ +// user: { +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get users with groups + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 2, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findFirst({ +// with: { +// usersToGroups: { +// limit: 1, +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get groups with users + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findFirst({ +// with: { +// usersToGroups: { +// limit: 1, +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 1, +// name: 'Group1', +// description: null, +// usersToGroups: [{ +// user: { +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get users with groups + limit + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 2, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findFirst({ +// where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), +// with: { +// usersToGroups: { 
+// where: eq(usersToGroupsTable.groupId, 1), +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get groups with users + limit + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findFirst({ +// where: gt(groupsTable.id, 1), +// with: { +// usersToGroups: { +// where: eq(usersToGroupsTable.userId, 2), +// limit: 1, +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get users with groups + where', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 2, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findFirst({ +// where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), +// with: { +// usersToGroups: { +// where: eq(usersToGroupsTable.groupId, 2), +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(0); + +// expect(response).toEqual({ +// id: 1, +// name: 'Dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [], +// }); +// }); + +// test.skip('[Find One] Get groups with users + where', async (t) => { +// const { singlestoreDb: db } = t; 
+ +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findFirst({ +// where: gt(groupsTable.id, 1), +// with: { +// usersToGroups: { +// where: eq(usersToGroupsTable.userId, 2), +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 2, +// name: 'Andrew', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get users with groups + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findFirst({ +// orderBy: (users, { desc }) => [desc(users.id)], +// with: { +// usersToGroups: { +// orderBy: [desc(usersToGroupsTable.groupId)], +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(2); + +// expect(response).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 3, +// name: 'Group3', +// description: null, +// }, +// }, { +// group: { +// id: 2, +// name: 'Group2', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get groups with users + orderBy', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findFirst({ +// orderBy: [desc(groupsTable.id)], +// with: { +// usersToGroups: { +// orderBy: (utg, { desc }) => [desc(utg.userId)], +// columns: {}, 
+// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 3, +// name: 'Group3', +// description: null, +// usersToGroups: [{ +// user: { +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('[Find One] Get users with groups + orderBy + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findFirst({ +// orderBy: (users, { desc }) => [desc(users.id)], +// with: { +// usersToGroups: { +// limit: 1, +// orderBy: [desc(usersToGroupsTable.groupId)], +// columns: {}, +// with: { +// group: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// }; +// }[]; +// } | undefined +// >(); + +// expect(response?.usersToGroups.length).toEqual(1); + +// expect(response).toEqual({ +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 3, +// name: 'Group3', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('Get groups with users + orderBy + limit', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findMany({ +// orderBy: [desc(groupsTable.id)], +// limit: 2, +// with: { +// usersToGroups: { +// limit: 1, +// orderBy: (utg, { desc }) => [desc(utg.userId)], +// columns: {}, +// with: { +// user: true, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// description: string | null; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// }; +// }[]; +// }[] +// >(); + +// expect(response.length).toEqual(2); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(1); + +// expect(response[1]).toEqual({ +// id: 2, +// name: 'Group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// }, +// }], 
+// }); + +// expect(response[0]).toEqual({ +// id: 3, +// name: 'Group3', +// description: null, +// usersToGroups: [{ +// user: { +// id: 3, +// name: 'Alex', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test.skip('Get users with groups + custom', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.usersTable.findMany({ +// extras: { +// lower: sql`lower(${usersTable.name})`.as('lower_name'), +// }, +// with: { +// usersToGroups: { +// columns: {}, +// with: { +// group: { +// extras: { +// lower: sql`lower(${groupsTable.name})`.as('lower_name'), +// }, +// }, +// }, +// }, +// }, +// }); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// lower: string; +// usersToGroups: { +// group: { +// id: number; +// name: string; +// description: string | null; +// lower: string; +// }; +// }[]; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(response.length).toEqual(3); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(1); +// expect(response[2]?.usersToGroups.length).toEqual(2); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Dan', +// lower: 'dan', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 1, +// name: 'Group1', +// lower: 'group1', +// description: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Andrew', +// lower: 'andrew', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 2, +// name: 'Group2', +// lower: 'group2', +// description: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: null, +// usersToGroups: [{ +// group: { +// id: 3, +// name: 'Group3', +// lower: 'group3', +// description: null, +// }, +// }, { +// group: { +// id: 2, +// name: 'Group2', +// lower: 'group2', +// description: null, +// }, +// }], +// }); +// }); + +// test.skip('Get groups with users + custom', async (t) => { +// const { singlestoreDb: db } = t; + +// await db.insert(usersTable).values([ +// { id: 1, name: 'Dan' }, +// { id: 2, name: 'Andrew' }, +// { id: 3, name: 'Alex' }, +// ]); + +// await db.insert(groupsTable).values([ +// { id: 1, name: 'Group1' }, +// { id: 2, name: 'Group2' }, +// { id: 3, name: 'Group3' }, +// ]); + +// await db.insert(usersToGroupsTable).values([ +// { userId: 1, groupId: 1 }, +// { userId: 2, groupId: 2 }, +// { userId: 3, groupId: 3 }, +// { userId: 3, groupId: 2 }, +// ]); + +// const response = await db.query.groupsTable.findMany({ +// extras: (table, { sql }) => ({ +// lower: sql`lower(${table.name})`.as('lower_name'), +// }), +// with: { +// usersToGroups: { +// columns: {}, +// with: { +// user: { +// extras: (table, { sql }) => ({ +// lower: sql`lower(${table.name})`.as('lower_name'), +// }), +// }, +// }, +// }, +// }, +// 
}); + +// expectTypeOf(response).toEqualTypeOf< +// { +// id: number; +// name: string; +// description: string | null; +// lower: string; +// usersToGroups: { +// user: { +// id: number; +// name: string; +// verified: boolean; +// invitedBy: number | null; +// lower: string; +// }; +// }[]; +// }[] +// >(); + +// response.sort((a, b) => (a.id > b.id) ? 1 : -1); + +// expect(response.length).toEqual(3); + +// expect(response[0]?.usersToGroups.length).toEqual(1); +// expect(response[1]?.usersToGroups.length).toEqual(2); +// expect(response[2]?.usersToGroups.length).toEqual(1); + +// expect(response).toContainEqual({ +// id: 1, +// name: 'Group1', +// lower: 'group1', +// description: null, +// usersToGroups: [{ +// user: { +// id: 1, +// name: 'Dan', +// lower: 'dan', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 2, +// name: 'Group2', +// lower: 'group2', +// description: null, +// usersToGroups: [{ +// user: { +// id: 2, +// name: 'Andrew', +// lower: 'andrew', +// verified: false, +// invitedBy: null, +// }, +// }, { +// user: { +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); + +// expect(response).toContainEqual({ +// id: 3, +// name: 'Group3', +// lower: 'group3', +// description: null, +// usersToGroups: [{ +// user: { +// id: 3, +// name: 'Alex', +// lower: 'alex', +// verified: false, +// invitedBy: null, +// }, +// }], +// }); +// }); + +// test('.toSQL()', () => { +// const query = db.query.usersTable.findFirst().toSQL(); + +// expect(query).toHaveProperty('sql', expect.any(String)); +// expect(query).toHaveProperty('params', expect.any(Array)); +// }); + +// // + custom + where + orderby + +// // + custom + where + orderby + limit + +// // + partial + +// // + partial(false) + +// // + partial + orderBy + where (all not selected) + +// /* +// One four-level relation users+posts+comments+comment_likes +// + users+users_to_groups+groups +// */ + +// /* +// Really hard case +// 1. users+posts+comments+comment_likes +// 2. users+users_to_groups+groups +// 3. 
users+users +// */ +// eslint-disable-next-line unicorn/no-empty-file diff --git a/integration-tests/tests/replicas/singlestore.test.ts b/integration-tests/tests/replicas/singlestore.test.ts new file mode 100644 index 000000000..8ddad5b04 --- /dev/null +++ b/integration-tests/tests/replicas/singlestore.test.ts @@ -0,0 +1,814 @@ +import { sql } from 'drizzle-orm'; +import { drizzle } from 'drizzle-orm/singlestore'; +import { serial, singlestoreTable, withReplicas } from 'drizzle-orm/singlestore-core'; +import { describe, expect, it, vi } from 'vitest'; + +// const usersTable = singlestoreTable('users', { +// id: serial('id' as string).primaryKey(), +// name: text('name').notNull(), +// verified: boolean('verified').notNull().default(false), +// }); + +const users = singlestoreTable('users', { + id: serial('id' as string).primaryKey(), +}); + +describe('[select] read replicas singlestore', () => { + it('primary select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query = db.$primary.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(query.toSQL().sql).toEqual('select `id` from `users`'); + + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); + + it('random replica select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query1 = db.select({ count: sql`count(*)`.as('count') }).from(users).limit(1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(query1.toSQL().sql).toEqual('select count(*) as `count` from `users` limit ?'); + + const query2 = db.select().from(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); + + it('single read replica select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + + const query1 = db.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select `id` from `users`'); + + const query2 = db.select().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); + + it('single read replica select + primary select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + + const query1 = db.select({ id: users.id }).from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + 
expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select `id` from `users`'); + + const query2 = db.$primary.select().from(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); + + it('always first read select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query1 = db.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select `id` from `users`'); + + const query2 = db.select().from(users); + + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); +}); + +describe('[selectDistinct] read replicas singlestore', () => { + it('primary selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query = db.$primary.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); + + it('random replica selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); + + it('single read replica selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(query2.toSQL().sql).toEqual('select distinct `id` from 
`users`'); + }); + + it('single read replica selectDistinct + primary selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.$primary.selectDistinct().from(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); + + it('always first read selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); +}); + +describe('[with] read replicas singlestore', () => { + it('primary with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + const obj1 = {} as any; + const obj2 = {} as any; + const obj3 = {} as any; + const obj4 = {} as any; + + db.$primary.with(obj1, obj2, obj3, obj4); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(obj1, obj2, obj3, obj4); + }); + + it('random replica with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.with(); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + }); + + it('single read replica with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + 
db.with(); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica with + primary with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.with(); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + const obj1 = {} as any; + const obj2 = {} as any; + const obj3 = {} as any; + + db.with(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj1); + + db.with(obj2, obj3); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj2, obj3); + }); +}); + +describe('[update] replicas singlestore', () => { + it('primary update', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'update'); + const spyRead1 = vi.spyOn(read1, 'update'); + const spyRead2 = vi.spyOn(read2, 'update'); + + const query1 = db.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('update `users` set `id` = ?'); + + const query2 = db.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('update `users` set `id` = ?'); + + const query3 = db.$primary.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query3.toSQL().sql).toEqual('update `users` set `id` = ?'); + }); +}); + +describe('[delete] replicas singlestore', () => { + it('primary delete', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'delete'); + const spyRead1 = vi.spyOn(read1, 'delete'); + const spyRead2 = vi.spyOn(read2, 'delete'); + + const query1 = db.delete(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(users); + expect(query1.toSQL().sql).toEqual('delete from `users`'); + + const query2 = db.delete(users); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, users); + expect(query2.toSQL().sql).toEqual('delete from 
`users`'); + + db.$primary.delete({} as any); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[insert] replicas singlestore', () => { + it('primary insert', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'insert'); + const spyRead1 = vi.spyOn(read1, 'insert'); + const spyRead2 = vi.spyOn(read2, 'insert'); + + const query = db.insert(users).values({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(users); + expect(query.toSQL().sql).toEqual('insert into `users` (`id`) values (?)'); + + db.insert(users); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, users); + + db.$primary.insert({} as any); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[execute] replicas singlestore', () => { + it('primary execute', async () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'execute'); + const spyRead1 = vi.spyOn(read1, 'execute'); + const spyRead2 = vi.spyOn(read2, 'execute'); + + expect(db.execute(sql``)).rejects.toThrow(); + + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(db.execute(sql``)).rejects.toThrow(); + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(db.execute(sql``)).rejects.toThrow(); + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[transaction] replicas singlestore', () => { + it('primary transaction', async () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'transaction'); + const spyRead1 = vi.spyOn(read1, 'transaction'); + const spyRead2 = vi.spyOn(read2, 'transaction'); + const txFn1 = async (tx: any) => { + tx.select().from({} as any); + }; + + expect(db.transaction(txFn1)).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(txFn1); + + const txFn2 = async (tx: any) => { + tx.select().from({} as any); + }; + + expect(db.transaction(txFn2)).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, txFn2); + + 
expect(db.transaction(async (tx) => { + tx.select().from({} as any); + })).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +// We are waiting for SingleStore support for `json_array` function +// describe('[findFirst] read replicas singlestore', () => { +// // it('primary findFirst', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); +// // const read2 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1, read2]); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); +// // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); +// // const obj = {} as any; + +// // db.$primary.query.usersTable.findFirst(obj); + +// // expect(spyPrimary).toHaveBeenCalledTimes(1); +// // expect(spyRead1).toHaveBeenCalledTimes(0); +// // expect(spyRead2).toHaveBeenCalledTimes(0); +// // expect(spyPrimary).toHaveBeenCalledWith(obj); +// // }); + +// // it('random replica findFirst', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); +// // const read2 = drizzle.mock({ schema: { usersTable } }); + +// // const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + +// // const db = withReplicas(primaryDb, [read1, read2], () => { +// // return randomMockReplica(); +// // }); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); +// // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); +// // const par1 = {} as any; + +// // db.query.usersTable.findFirst(par1); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyRead2).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledWith(par1); + +// // const query = db.query.usersTable.findFirst(); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyRead2).toHaveBeenCalledTimes(1); +// // expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable` limit ?'); +// // }); + +// // We are waiting for SingleStore support for `json_array` function +// // it('single read replica findFirst', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1]); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); + +// // db.query.usersTable.findFirst(); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); + +// // db.query.usersTable.findFirst(); +// // expect(spyRead1).toHaveBeenCalledTimes(2); +// // }); + +// // We are waiting for SingleStore support for `json_array` function +// // it('single read replica findFirst + primary findFirst', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1]); + +// // const spyPrimary = 
vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); + +// // db.query.usersTable.findFirst(); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); + +// // db.$primary.query.usersTable.findFirst(); +// // expect(spyPrimary).toHaveBeenCalledTimes(1); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // }); + +// // We are waiting for SingleStore support for `json_array` function +// // it('always first read findFirst', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); +// // const read2 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1, read2], (replicas) => { +// // return replicas[0]!; +// // }); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); +// // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); + +// // db.query.usersTable.findFirst(); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyRead2).toHaveBeenCalledTimes(0); + +// // db.query.usersTable.findFirst(); +// // expect(spyRead1).toHaveBeenCalledTimes(2); +// // expect(spyRead2).toHaveBeenCalledTimes(0); +// // }); +// }); + +// describe('[findMany] read replicas singlestore', () => { +// // We are waiting for SingleStore support for `json_array` function +// // it('primary findMany', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); +// // const read2 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1, read2]); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); +// // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); +// // const obj = {} as any; + +// // const query = db.$primary.query.usersTable.findMany(obj); + +// // expect(spyPrimary).toHaveBeenCalledTimes(1); +// // expect(spyRead1).toHaveBeenCalledTimes(0); +// // expect(spyRead2).toHaveBeenCalledTimes(0); +// // expect(spyPrimary).toHaveBeenCalledWith(obj); +// // expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); +// // }); + +// // We are waiting for SingleStore support for `json_array` function +// // it('random replica findMany', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); +// // const read2 = drizzle.mock({ schema: { usersTable } }); + +// // const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + +// // const db = withReplicas(primaryDb, [read1, read2], () => { +// // return randomMockReplica(); +// // }); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); +// // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); +// // const obj1 = {} as any; +// // const obj2 = {} as any; + +// // const query1 = db.query.usersTable.findMany(obj1); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // 
expect(spyRead2).toHaveBeenCalledTimes(0); +// // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); +// // expect(spyRead1).toHaveBeenCalledWith(obj1); + +// // const query2 = db.query.usersTable.findMany(obj2); + +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyRead2).toHaveBeenCalledTimes(1); +// // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); +// // expect(spyRead2).toHaveBeenCalledWith(obj2); +// // }); + +// // We are waiting for SingleStore support for `json_array` function +// // it('single read replica findMany', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1]); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); +// // const obj1 = {} as any; +// // const obj2 = {} as any; + +// // const query1 = db.query.usersTable.findMany(obj1); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyRead1).toHaveBeenCalledWith(obj1); +// // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + +// // const query2 = db.query.usersTable.findMany(obj2); +// // expect(spyRead1).toHaveBeenCalledTimes(2); +// // expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); +// // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); +// // }); + +// // We are waiting for SingleStore support for `json_array` function +// // it('single read replica findMany + primary findMany', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1]); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); +// // const obj1 = {} as any; +// // const obj2 = {} as any; + +// // const query1 = db.query.usersTable.findMany(obj1); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyRead1).toHaveBeenCalledWith(obj1); +// // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + +// // const query2 = db.$primary.query.usersTable.findMany(obj2); + +// // expect(spyPrimary).toHaveBeenCalledTimes(1); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyPrimary).toHaveBeenNthCalledWith(1, obj2); +// // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); +// // }); + +// // We are waiting for SingleStore support for `json_array` function +// // it('always first read findMany', () => { +// // const primaryDb = drizzle.mock({ schema: { usersTable } }); +// // const read1 = drizzle.mock({ schema: { usersTable } }); +// // const read2 = drizzle.mock({ schema: { usersTable } }); + +// // const db = withReplicas(primaryDb, [read1, read2], (replicas) => { +// // return replicas[0]!; +// // }); + +// // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); +// // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); +// // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); +// // 
const obj1 = {} as any; +// // const obj2 = {} as any; + +// // const query1 = db.query.usersTable.findMany(obj1); + +// // expect(spyPrimary).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledTimes(1); +// // expect(spyRead2).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenCalledWith(obj1); +// // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + +// // const query2 = db.query.usersTable.findMany(obj2); +// // expect(spyRead1).toHaveBeenCalledTimes(2); +// // expect(spyRead2).toHaveBeenCalledTimes(0); +// // expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); +// // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); +// // }); +// }); diff --git a/integration-tests/tests/singlestore/singlestore-common.ts b/integration-tests/tests/singlestore/singlestore-common.ts new file mode 100644 index 000000000..6335bf9ec --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-common.ts @@ -0,0 +1,3429 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import Docker from 'dockerode'; +import { + and, + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + exists, + getTableColumns, + gt, + gte, + inArray, + lt, + max, + min, + Name, + notInArray, + placeholder, + sql, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import type { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; +import { + alias, + bigint, + boolean, + date, + datetime, + decimal, + except, + getTableConfig, + int, + intersect, + json, + mediumint, + primaryKey, + serial, + singlestoreEnum, + singlestoreSchema, + singlestoreTable, + singlestoreTableCreator, + /* singlestoreView, */ + smallint, + text, + time, + timestamp, + tinyint, + union, + unionAll, + unique, + uniqueIndex, + uniqueKeyName, + varchar, + year, +} from 'drizzle-orm/singlestore-core'; +import { migrate } from 'drizzle-orm/singlestore/migrator'; +import getPort from 'get-port'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; +import { Expect, toLocalDate } from '~/utils.ts'; +import type { Equal } from '~/utils.ts'; + +type TestSingleStoreDB = SingleStoreDatabase; + +declare module 'vitest' { + interface TestContext { + singlestore: { + db: TestSingleStoreDB; + }; + } +} + +const ENABLE_LOGGING = false; + +const usersTable = singlestoreTable('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); + +const users2Table = singlestoreTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesTable = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const usersOnUpdate = singlestoreTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: datetime('updated_at', { mode: 'date' }).$onUpdateFn(() => new Date()), + alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value +}); + +const datesTable = singlestoreTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { 
mode: 'string' }), + time: time('time'), + datetime: datetime('datetime'), + datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), + timestamp: timestamp('timestamp'), + timestampAsString: timestamp('timestamp_as_string', { mode: 'string' }), + year: year('year'), +}); + +const coursesTable = singlestoreTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id'), +}); + +const courseCategoriesTable = singlestoreTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), +}); + +const usersMigratorTable = singlestoreTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}, (table) => { + return { + name: uniqueIndex('').on(table.name).using('btree'), + }; +}); + +// To test aggregate functions +const aggregateTable = singlestoreTable('aggregate_table', { + id: serial('id').notNull(), + name: text('name').notNull(), + a: int('a'), + b: int('b'), + c: int('c'), + nullOnly: int('null_only'), +}); + +// To test another schema and multischema +const mySchema = singlestoreSchema(`mySchema`); + +const usersMySchemaTable = mySchema.table('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); + +const users2MySchemaTable = mySchema.table('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesMySchemaTable = mySchema.table('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +let singlestoreContainer: Docker.Container; +export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) + ); + + singlestoreContainer = await docker.createContainer({ + Image: image, + Env: ['ROOT_PASSWORD=singlestore'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await singlestoreContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return { + connectionString: `singlestore://root:singlestore@localhost:${port}/`, + container: singlestoreContainer, + }; +} + +export function tests(driver?: string) { + describe('common', () => { + afterAll(async () => { + await singlestoreContainer?.stop().catch(console.error); + }); + + beforeEach(async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`drop table if exists userstest`); + await db.execute(sql`drop table if exists users2`); + await db.execute(sql`drop table if exists cities`); + + await db.execute(sql`drop schema if exists \`mySchema\``); + await db.execute(sql`create schema if not exists \`mySchema\``); + + await db.execute( + sql` + create table userstest ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb json, + created_at timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table users2 ( + id serial primary key, + name text not null, + city_id int + ) + `, + ); + + await db.execute( + sql` + create table cities ( + id serial primary key, + name text not null + ) + `, + ); + + // mySchema + await db.execute( + sql` + create table \`mySchema\`.\`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table \`mySchema\`.\`cities\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table \`mySchema\`.\`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int + ) + `, + ); + }); + + async function setupReturningFunctionsTest(db: SingleStoreDatabase) { + await db.execute(sql`drop table if exists \`users_default_fn\``); + await db.execute( + sql` + create table \`users_default_fn\` ( + \`id\` varchar(256) primary key, + \`name\` text not null + ); + `, + ); + } + + async function setupSetOperationTest(db: TestSingleStoreDB) { + await db.execute(sql`drop table if exists \`users2\``); + await db.execute(sql`drop table if exists \`cities\``); + await db.execute( + sql` + create table \`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int + ) + `, + ); + + await db.execute( + sql` + create table \`cities\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.insert(citiesTable).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + } + + async function setupAggregateFunctionsTest(db: TestSingleStoreDB) { + await db.execute(sql`drop table if exists \`aggregate_table\``); + await db.execute( + sql` + create table \`aggregate_table\` ( + \`id\` integer primary key 
auto_increment not null, + \`name\` text not null, + \`a\` integer, + \`b\` integer, + \`c\` integer, + \`null_only\` integer + ); + `, + ); + await db.insert(aggregateTable).values([ + { id: 1, name: 'value 1', a: 5, b: 10, c: 20 }, + { id: 2, name: 'value 1', a: 5, b: 20, c: 30 }, + { id: 3, name: 'value 2', a: 10, b: 50, c: 60 }, + { id: 4, name: 'value 3', a: 20, b: 20, c: null }, + { id: 5, name: 'value 4', a: null, b: 90, c: 120 }, + { id: 6, name: 'value 5', a: 80, b: 10, c: null }, + { id: 7, name: 'value 6', a: null, b: null, c: 150 }, + ]); + } + + test('table config: unsigned ints', async () => { + const unsignedInts = singlestoreTable('cities1', { + bigint: bigint('bigint', { mode: 'number', unsigned: true }), + int: int('int', { unsigned: true }), + smallint: smallint('smallint', { unsigned: true }), + mediumint: mediumint('mediumint', { unsigned: true }), + tinyint: tinyint('tinyint', { unsigned: true }), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); + expect(intColumn.getSQLType()).toBe('int unsigned'); + expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); + expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); + expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); + }); + + test('table config: signed ints', async () => { + const unsignedInts = singlestoreTable('cities1', { + bigint: bigint('bigint', { mode: 'number' }), + int: int('int'), + smallint: smallint('smallint'), + mediumint: mediumint('mediumint'), + tinyint: tinyint('tinyint'), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint'); + expect(intColumn.getSQLType()).toBe('int'); + expect(smallintColumn.getSQLType()).toBe('smallint'); + expect(mediumintColumn.getSQLType()).toBe('mediumint'); + expect(tinyintColumn.getSQLType()).toBe('tinyint'); + }); + + test('table config: primary keys name', async () => { + const table = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); + }); + + test('table configs: unique third param', async () => { + const cities1Table = singlestoreTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: unique('custom_name').on(t.name, t.state), + f1: unique('custom_name1').on(t.name, t.state), + })); + + const tableConfig = getTableConfig(cities1Table); + + 
expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + }); + + test('table configs: unique in column', async () => { + const cities1Table = singlestoreTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: text('state').unique('custom'), + field: text('field').unique('custom_field'), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.isUnique).toBeTruthy(); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBeTruthy(); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBeTruthy(); + }); + + test('select all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('select sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select typed sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select with empty array in inArray', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(inArray(usersTable.id, [])) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([]); + }); + + test('select with empty array in notInArray', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(notInArray(usersTable.id, [])) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); + }); + + test('select distinct', async (ctx) => { + const { db } = ctx.singlestore; + + const usersDistinctTable = singlestoreTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if 
exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test('insert returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test('delete returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test('update returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); + }); + + test('update with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + }); + + test('update with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('delete with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('delete with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('insert + select', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ id: 2, name: 'Jane' }); + const 
result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('json insert', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + }); + + test('insert with overridden default values', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('insert many', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('insert many with returning', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); + }); + + test('select with group by as field', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('select with exists', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const user = alias(usersTable, 'user'); + const result = await db.select({ name: usersTable.name }).from(usersTable).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), + ), + ) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }]); + }); + + test('select with group by as sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('$default function', async (ctx) => { + const { db } 
= ctx.singlestore; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); + const selectedOrder = await db.select().from(orders); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test('$default with empty array', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`s_orders\``); + await db.execute( + sql` + create table \`s_orders\` ( + \`id\` serial primary key, + \`region\` text default 'Ukraine', + \`product\` text not null + ) + `, + ); + + const users = singlestoreTable('s_orders', { + id: serial('id').primaryKey(), + region: text('region').default('Ukraine'), + product: text('product').$defaultFn(() => 'random_string'), + }); + + await db.insert(users).values({ id: 1 }); + const selectedOrder = await db.select().from(users); + + expect(selectedOrder).toEqual([{ + id: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by complex query', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test('build query', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); + }); + + test('Query check: Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', + params: [], + }); + }); + + test('Query check: Insert all 
defaults in multiple rows', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', + params: [], + }); + }); + + test('Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('empty_insert_single', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values({ id: 1 }); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test('Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('empty_insert_multiple', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values([{ id: 1 }, { id: 2 }]); + + const res = await db.select().from(users).orderBy(asc(users.id)); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test('build query insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.insert(usersTable) + .values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { id: 1, name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (?, ?, default, ?, default) on duplicate key update `id` = ?, `name` = ?', + params: [1, 'John', '["foo","bar"]', 1, 'John1'], + }); + }); + + test('insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); + }); + + test('insert conflict', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); + }); + + test('insert conflict with ignore', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert sql', async (ctx) => { + const { db } = ctx.singlestore; + + await 
db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('partial join with alias', async (ctx) => { + const { db } = ctx.singlestore; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + }); + + test('full join with alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)) + .orderBy(asc(users.id)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('select from alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .orderBy(asc(user.id)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('insert with spaces', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('prepared statement', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert: placeholders on columns with encoder', async (ctx) => { + const { db } = ctx.singlestore; + + const date = new Date('2024-08-07T15:30:00Z'); + + 
const statement = db.insert(usersTable).values({ + id: 1, + name: 'John', + createdAt: sql.placeholder('createdAt'), + }).prepare(); + + await statement.execute({ createdAt: date }); + + const result = await db + .select({ + id: usersTable.id, + createdAt: usersTable.createdAt, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, createdAt: date }, + ]); + }); + + test('prepared statement reuse', async (ctx) => { + const { db } = ctx.singlestore; + + const stmt = db.insert(usersTable).values({ + verified: true, + id: placeholder('id'), + name: placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ id: i + 1, name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); + }); + + test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('migrator', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); + + await db.insert(usersMigratorTable).values({ id: 1, name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table cities_migration`); + await db.execute(sql`drop table users_migration`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table __drizzle_migrations`); + }); + + test('insert via db.execute + select via db.execute', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute( + sql`insert into ${usersTable} (${new Name(usersTable.id.name)},${new Name( + usersTable.name.name, + )}) values (1,${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert via db.execute w/ query builder', async (ctx) => { + const { db } = ctx.singlestore; + + const inserted = await db.execute( + db.insert(usersTable).values({ id: 1, name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); + }); + + test('insert + select all possible dates', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`date\` 
date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`timestamp\` timestamp(6), + \`timestamp_as_string\` timestamp(6), + \`year\` year + ) + `, + ); + + const date = new Date('2022-11-11'); + const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: date, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: dateWithMilliseconds, + timestampAsString: '2022-11-11 12:12:12.123', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: new Date('2022-11-11 12:12:12.123'), + timestampAsString: '2022-11-11 12:12:12.123000', + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + const tableWithEnums = singlestoreTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + test('SingleStore enum test case #1', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`enums_test_case\``); + + await db.execute(sql` + create table \`enums_test_case\` ( + \`id\` serial primary key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); + + await db.execute(sql`drop table \`enums_test_case\``); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); + }); + + test('left join (flat object fields)', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(users2Table.id); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); + }); + + test('left join (grouped fields)', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + id: users2Table.id, + user: { + name: users2Table.name, 
+ nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); + }); + + test('left join (all fields)', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select().from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId: 1, + }, + cities: { + id: 1, + name: 'Paris', + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); + }); + + test('join subquery', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + + await db.execute( + sql` + create table \`course_categories\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table \`courses\` ( + \`id\` serial primary key, + \`name\` text not null, + \`category_id\` int + ) + `, + ); + + await db.insert(courseCategoriesTable).values([ + { id: 1, name: 'Category 1' }, + { id: 2, name: 'Category 2' }, + { id: 3, name: 'Category 3' }, + { id: 4, name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { id: 1, name: 'Development', categoryId: 2 }, + { id: 2, name: 'IT & Software', categoryId: 3 }, + { id: 3, name: 'Marketing', categoryId: 4 }, + { id: 4, name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + }); + + test('with ... 
select', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + }); + + test('with ... update', async (ctx) => { + const { db } = ctx.singlestore; + + const products = singlestoreTable('products', { + id: serial('id').primaryKey(), + price: decimal('price', { + precision: 15, + scale: 2, + }).notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await db.execute(sql`drop table if exists ${products}`); + await db.execute(sql` + create table ${products} ( + id serial primary key, + price decimal(15, 2) not null, + cheap boolean not null default false + ) + `); + + await db.insert(products).values([ + { id: 1, price: '10.99' }, + { id: 2, price: '25.85' }, + { id: 3, price: '32.99' }, + { id: 4, price: '2.50' }, + { id: 5, price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)); + + const result = await db + .select({ + id: products.id, + }) + .from(products) + .where(eq(products.cheap, true)) + .orderBy(asc(products.id)); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('with ... 
delete', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { id: 1, region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { id: 2, region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { id: 3, region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { id: 4, region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { id: 5, region: 'US', product: 'A', amount: 30, quantity: 3 }, + { id: 6, region: 'US', product: 'A', amount: 40, quantity: 4 }, + { id: 7, region: 'US', product: 'B', amount: 40, quantity: 4 }, + { id: 8, region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); + + const result = await db + .select({ + id: orders.id, + }) + .from(orders) + .orderBy(asc(orders.id)); + + expect(result).toEqual([ + { id: 1 }, + { id: 2 }, + { id: 3 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('select from subquery sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) + .from(users2Table) + .orderBy(asc(users2Table.id)) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); + }); + + test('select a field without joining its table', (ctx) => { + const { db } = ctx.singlestore; + + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); + }); + + test('select all fields from subquery without alias', (ctx) => { + const { db } = ctx.singlestore; + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); + }); + + test('select count()', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); + }); + + test('select for ...', (ctx) => { + const { db } = ctx.singlestore; + + { + const query = db.select().from(users2Table).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update no wait$/); + } + }); + + test('having', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { + id: 3, + name: 'New York', + }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { + id: 3, + name: 'Jack', + cityId: 2, + }]); + + const result = await db 
+ .select({ + id: citiesTable.id, + name: sql<string>`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql<number>`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); + }); + + // TODO: Unskip when views are supported + /* test.skip('view', async (ctx) => { + const { db } = ctx.singlestore; + + const newYorkers1 = singlestoreView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); */ + + test('select from raw sql', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.select({ + id: sql<number>`id`, + name: sql<string>`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect<Equal<{ id: number; name: string }[], typeof result>>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + }); + + test('select from raw sql with joins', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db + .select({ + id: sql<number>`users.id`, + name: sql<string>`users.name`, + userCity: sql<string>`users.city`, + cityName: sql<string>`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect<Equal<{ id: number; name: string; userCity: string; cityName: string }[], typeof result>>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from select', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db + .select({ + userId: sql<number>`users.id`.as('userId'), + name: sql<string>`users.name`, + userCity: sql<string>`users.city`, + cityId: sql<number>`cities.id`.as('cityId'), + cityName: sql<string>`cities.name`, +
}) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from with clause', async (ctx) => { + const { db } = ctx.singlestore; + + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('prefixed table', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); + + const users = singlestoreTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('orderBy with aliased column', (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); + }); + + test('timestamp timezone', async (ctx) => { + const { db } = ctx.singlestore; + + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await db.insert(usersTable).values({ id: 1, name: 'With default times' }); + await db.insert(usersTable).values({ + id: 2, + name: 'Without default times', + createdAt: date, + }); + const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); + }); + + test('transaction', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_transactions', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = singlestoreTable('products_transactions', { + id: serial('id').primaryKey(), + price: 
int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); + await db.execute( + sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, + ); + + const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + }); + + test('transaction rollback', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + + await db.execute(sql`drop table ${users}`); + }); + + test('join subquery with join', async (ctx) => { + const { db } = ctx.singlestore; + + const internalStaff = singlestoreTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = singlestoreTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = singlestoreTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table internal_staff (user_id integer not null)`); + await db.execute(sql`create table custom_user (id integer not null)`); + await db.execute(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table 
${ticket}`); + }); + + // TODO: Unskip when views are supported + /* test.skip('subquery with view', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); */ + + // TODO: Unskip when views are supported + /* test.skip('join view as subquery', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); */ + + test('select iterator', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const iter = db.select().from(users) + .orderBy(asc(users.id)) + .iterator(); + + const result: typeof users.$inferSelect[] = []; + + for await 
(const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test('select iterator w/ prepared statement', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const prepared = db.select().from(users) + .orderBy(asc(users.id)) + .prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test('insert undefined', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('update undefined', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('utc config for datetime', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`datetime_utc\` datetime(6), + \`datetime\` datetime(6) + ) + `, + ); + const datesTable = singlestoreTable('datestable', { + datetimeUTC: datetime('datetime_utc', { mode: 'date' }), + datetime: datetime('datetime'), + }); + + const dateObj = new Date('2022-11-11'); + const dateUtc = new Date('2022-11-11T12:12:12.122Z'); + + await db.insert(datesTable).values({ + datetimeUTC: dateUtc, + datetime: dateObj, + }); + + const res = await db.select().from(datesTable); + + const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); + const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; + + expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122000'); + expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); + + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); + + expect(res).toEqual([{ + datetimeUTC: dateUtc, + datetime: new Date('2022-11-11'), + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + // TODO (https://memsql.atlassian.net/browse/MCDB-63261) allow chaining limit and orderby in subquery + test('set operations (union) from query builder with subquery', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + const citiesQuery = db + 
.select({ + id: citiesTable.id, + name: citiesTable.name, + orderCol: sql`0`.as('orderCol'), + }) + .from(citiesTable); + + const usersQuery = db + .select({ + id: users2Table.id, + name: users2Table.name, + orderCol: sql`1`.as('orderCol'), + }) + .from(users2Table); + + const unionQuery = db + .select({ + id: sql`id`, + name: sql`name`, + }) + .from( + citiesQuery.union(usersQuery).as('combined'), + ) + .orderBy(sql`orderCol`, sql`id`) + .limit(8); + + const result = await unionQuery; + + expect(result).toHaveLength(8); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + ]); + + // union should throw if selected fields are not in the same order + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).orderBy(asc(sql`id`)).limit(2).unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).orderBy(asc(sql`id`)).limit(2), + ).as('sq'); + + const result = await db.select().from(sq).orderBy(asc(sql`id`)).limit(3); + + expect(result).toHaveLength(3); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2).unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, 
name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(1); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ) + .as('sq'); + + const result = await db.select().from(sq).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = await intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(1); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test('set operations (except) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + }); + + test('set operations (except) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = await except( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await 
db.select().from(sq).limit(3); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + except( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(3); + })()).rejects.toThrowError(); + }); + + test.skip('set operations (mixed) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq1 = unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ).as('sq1'); + + const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); + + const sq3 = await db.select().from(sq2).limit(1).offset(1).as('sq3'); + + const result = await db + .select() + .from(citiesTable) + .except( + db + .select() + .from(sq3), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 3, name: 'Tampa' }, + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (mixed all) as function with subquery', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq1 = except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ).as('sq1'); + + const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); + + const sq3 = await db.select().from(sq2).limit(1).as('sq3'); + + const result = await union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db.select().from(sq3), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(4); + + // multiple results possible as a result of the filters >= 5 and ==7 because singlestore doesn't guarantee order + // dynamically validate results + const hasValidEntry = (entry: { id: number; name: string }) => { + if (entry.id === 1) return entry.name === 'John'; + if (entry.id > 1 && entry.id < 5) return entry.name === 'Tampa' || entry.name === 'London'; + if (entry.id >= 5 && entry.id !== 7) return true; // Accept any entry with id >= 5 and not 7 + return false; + }; + + for (const entry of result) { + expect(hasValidEntry(entry)).toBe(true); + } + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 7)), + ).limit(1), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('aggregate function: 
count', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); + }); + + test('aggregate function: avg', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.b) }).from(table); + const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('33.3333'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('42.5000'); + }); + + test('aggregate function: sum', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('170'); + }); + + test('aggregate function: max', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBe(null); + }); + + test('aggregate function: min', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBe(null); + }); + + test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial not null primary key, + name text not null, + update_counter integer default 1 not null, + updated_at datetime(6), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 750; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - 
msDelay); + } + }); + + test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial not null primary key, + name text not null, + update_counter integer default 1 not null, + updated_at datetime(6), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { id: 1, name: 'John', alwaysNull: 'this will will be null after updating' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + const initial = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from(usersOnUpdate); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + + const justDates = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from( + usersOnUpdate, + ); + + const response = await db.select().from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { id: 1, name: 'Angel', updateCounter: 2, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 2, name: 'Jane', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 3, name: 'Jack', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 4, name: 'Jill', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + ]); + + const initialRecord = initial.find((record) => record.id === 1); + const updatedRecord = justDates.find((record) => record.id === 1); + + expect(initialRecord?.updatedAt?.valueOf()).not.toBe(updatedRecord?.updatedAt?.valueOf()); + + const msDelay = 1000; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + // mySchema tests + test('mySchema :: select all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: select sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select typed sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select distinct', async (ctx) => { + const { db } = ctx.singlestore; + + const usersDistinctTable = singlestoreTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + 
await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test('mySchema :: insert returning sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + const [result, _] = await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test('mySchema :: delete returning sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test('mySchema :: update with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( + eq(usersMySchemaTable.name, 'John'), + ); + + const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ) + .where( + eq(usersMySchemaTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('mySchema :: delete with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('mySchema :: insert + select', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersMySchemaTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersMySchemaTable).orderBy(asc(usersMySchemaTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('mySchema :: insert with overridden default values', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersMySchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: insert many', async (ctx) => { + const { db } = 
ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + jsonb: usersMySchemaTable.jsonb, + verified: usersMySchemaTable.verified, + }).from(usersMySchemaTable) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('mySchema :: select with group by as field', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { + id: 3, + name: 'Jane', + }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.name) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('mySchema :: select with group by as column + sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { + id: 3, + name: 'Jane', + }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('mySchema :: build query', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: + `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`mySchema\`.\`userstest\`.\`id\`, \`mySchema\`.\`userstest\`.\`name\``, + params: [], + }); + }); + + test('mySchema :: insert with spaces', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('mySchema :: prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }).from(usersMySchemaTable) + .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { + const 
{ db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute( + sql` + create table \`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); + await db.insert(usersTable).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersTable, 'customer'); + + const result = await db + .select().from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersMySchemaTable.id, 10)); + + expect(result).toEqual([{ + userstest: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.userstest.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, + }]); + }); + + test('insert $returningId: serial as id', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.insert(usersTable).values({ id: 1, name: 'John' }).$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + expect(result).toStrictEqual([{ id: 1 }]); + }); + + test('insert $returningId: serial as id, batch insert', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'John1' }]) + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + // singlestore auto increments when batch inserting, so the ids increment by one + expect(result).toStrictEqual([{ id: 2 }, { id: 3 }]); + }); + + test('insert $returningId: $default as primary key', async (ctx) => { + const { db } = ctx.singlestore; + + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = singlestoreTable('users_default_fn', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await setupReturningFunctionsTest(db); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) + // ^? + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { + customId: 'dyqs529eom0iczo2efxzbcut', + }]); + }); + + test('insert $returningId: $default as primary key with value', async (ctx) => { + const { db } = ctx.singlestore; + + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = singlestoreTable('users_default_fn', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await setupReturningFunctionsTest(db); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) + // ^? 
+ .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); + }); + + // TODO: Unkip this test when views are supported + /* test.skip('mySchema :: view', async (ctx) => { + const { db } = ctx.singlestore; + + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); */ + + test('limit 0', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(0); + + expect(users).toEqual([]); + }); + + test('limit -1', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(-1); + + expect(users.length).toBeGreaterThan(0); + }); + }); +} diff --git a/integration-tests/tests/singlestore/singlestore-custom.test.ts b/integration-tests/tests/singlestore/singlestore-custom.test.ts new file mode 100644 index 000000000..c599df436 --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-custom.test.ts @@ -0,0 +1,827 @@ +import retry from 'async-retry'; +import type Docker from 'dockerode'; +import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; +import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +import { drizzle } from 'drizzle-orm/singlestore'; +import { + alias, + binary, + customType, + date, + datetime, + serial, + singlestoreEnum, + singlestoreTable, + singlestoreTableCreator, + text, + time, + varchar, + year, +} from 'drizzle-orm/singlestore-core'; +import { migrate } from 'drizzle-orm/singlestore/migrator'; +import * as mysql2 from 'mysql2/promise'; +import { v4 as uuid } from 'uuid'; +import { 
afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { toLocalDate } from '~/utils'; +import { createDockerDB } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +let db: SingleStoreDriverDatabase; +let client: mysql2.Connection; +let container: Docker.Container | undefined; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { connectionString: conStr, container: containerObj } = await createDockerDB(); + connectionString = conStr; + container = containerObj; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); + await container?.stop().catch(console.error); +}); + +beforeEach((ctx) => { + ctx.singlestore = { + db, + }; +}); + +const customSerial = customType<{ data: number; notNull: true; default: true }>({ + dataType() { + return 'serial'; + }, +}); + +const customText = customType<{ data: string }>({ + dataType() { + return 'text'; + }, +}); + +const customBoolean = customType<{ data: boolean }>({ + dataType() { + return 'boolean'; + }, + fromDriver(value) { + if (typeof value === 'boolean') { + return value; + } + return value === 1; + }, +}); + +const customJson = <TData>(name: string) => + customType<{ data: TData; driverData: string }>({ + dataType() { + return 'json'; + }, + toDriver(value: TData): string { + return JSON.stringify(value); + }, + })(name); + +const customTimestamp = customType< + { data: Date; driverData: string; config: { fsp: number } } +>({ + dataType(config) { + const precision = config?.fsp === undefined ? '' : ` (${config.fsp})`; + return `timestamp${precision}`; + }, + fromDriver(value: string): Date { + return new Date(value); + }, +}); + +const customBinary = customType<{ data: string; driverData: Buffer; config: { length: number } }>({ + dataType(config) { + return config?.length === undefined + ?
`binary` + : `binary(${config.length})`; + }, + + toDriver(value) { + return sql`UNHEX(${value})`; + }, + + fromDriver(value) { + return value.toString('hex'); + }, +}); + +const usersTable = singlestoreTable('userstest', { + id: customSerial('id').primaryKey(), + name: customText('name').notNull(), + verified: customBoolean('verified').notNull().default(false), + jsonb: customJson('jsonb'), + createdAt: customTimestamp('created_at').notNull().default(sql`now()`), +}); + +const datesTable = singlestoreTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time'), + datetime: datetime('datetime'), + datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), + year: year('year'), +}); + +export const testTable = singlestoreTable('test_table', { + id: customBinary('id', { length: 16 }).primaryKey(), + sqlId: binary('sql_id', { length: 16 }), + rawId: varchar('raw_id', { length: 64 }), +}); + +const usersMigratorTable = singlestoreTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +beforeEach(async () => { + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute(sql`drop table if exists \`test_table\``); + // await ctx.db.execute(sql`create schema public`); + await db.execute( + sql` + create table \`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table \`datestable\` ( + \`date\` date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`year\` year + ) + `, + ); + + await db.execute( + sql` + create table \`test_table\` ( + \`id\` binary(16) primary key, + \`sql_id\` binary(16), + \`raw_id\` varchar(64) + ) + `, + ); +}); + +test('select all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('select sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('insert returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); +}); + +test('delete returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + 
expect(users[0].affectedRows).toBe(1); +}); + +test('update returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); +}); + +test('update with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +test('update with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('delete with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('delete with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('insert + select', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('insert with overridden default values', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + 
+test('insert many', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable).orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); +}); + +test('select with group by as field', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by complex query', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` 
from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); +}); + +test('build query insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); +}); + +test('insert conflict', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); +}); + +test('insert conflict with ignore', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async (ctx) => { + const { db } = ctx.singlestore; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); +}); + +test('full join with alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)) + .orderBy(asc(users.id)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 
'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .orderBy(asc(user.id)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('prepared statement', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement reuse', async (ctx) => { + const { db } = ctx.singlestore; + + const stmt = db.insert(usersTable).values({ + id: placeholder('id'), + verified: true, + name: placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ id: i + 1, name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable).orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('migrator', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); + + await db.insert(usersMigratorTable).values({ id: 1, name: 
'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table cities_migration`); + await db.execute(sql`drop table users_migration`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table __drizzle_migrations`); +}); + +test('insert via db.execute + select via db.execute', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute( + sql`insert into ${usersTable} (${new Name(usersTable.id.name)}, ${new Name( + usersTable.name.name, + )}) values (1,${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async (ctx) => { + const { db } = ctx.singlestore; + + const inserted = await db.execute( + db.insert(usersTable).values({ name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); +}); + +test('insert + select all possible dates', async (ctx) => { + const { db } = ctx.singlestore; + + const date = new Date('2022-11-11'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: date, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.dateAsString).toBeTypeOf('string'); + expect(res[0]?.datetimeAsString).toBeTypeOf('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + }]); +}); + +const tableWithEnums = singlestoreTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), +}); + +test('SingleStore enum test case #1', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`enums_test_case\``); + + await db.execute(sql` + create table \`enums_test_case\` ( + \`id\` serial primary key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); + + await db.execute(sql`drop table \`enums_test_case\``); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test('custom binary', async (ctx) => { + const { db } = ctx.singlestore; + + const id = uuid().replace(/-/g, ''); + await db.insert(testTable).values({ + id, + sqlId: sql`UNHEX(${id})`, + rawId: id, + }); + + const res = await db.select().from(testTable); + + expect(res).toEqual([{ + id, + sqlId: Buffer.from(id, 'hex'), + rawId: id, + }]); +}); diff --git a/integration-tests/tests/singlestore/singlestore-prefixed.test.ts 
b/integration-tests/tests/singlestore/singlestore-prefixed.test.ts new file mode 100644 index 000000000..6f29d31a2 --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-prefixed.test.ts @@ -0,0 +1,1574 @@ +import retry from 'async-retry'; +import type Docker from 'dockerode'; +import type { Equal } from 'drizzle-orm'; +import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; +import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +import { drizzle } from 'drizzle-orm/singlestore'; +import { + alias, + boolean, + date, + datetime, + int, + json, + serial, + singlestoreEnum, + singlestoreTable as singlestoreTableRaw, + singlestoreTableCreator, + /* singlestoreView, */ + text, + time, + timestamp, + uniqueIndex, + year, +} from 'drizzle-orm/singlestore-core'; +import { migrate } from 'drizzle-orm/singlestore/migrator'; +import * as mysql2 from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { Expect, toLocalDate } from '~/utils'; +import { createDockerDB } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +let db: SingleStoreDriverDatabase; +let client: mysql2.Connection; +let container: Docker.Container | undefined; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); + connectionString = conStr; + container = contrainerObj; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); + await container?.stop().catch(console.error); +}); + +const tablePrefix = 'drizzle_tests_'; + +const singlestoreTable = singlestoreTableCreator((name) => `${tablePrefix}${name}`); +const usersTable = singlestoreTable('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); + +const users2Table = singlestoreTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesTable = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +beforeEach(async () => { + await db.execute(sql`drop table if exists ${usersTable}`); + await db.execute(sql`drop table if exists ${users2Table}`); + await db.execute(sql`drop table if exists ${citiesTable}`); + + await db.execute( + sql` + create table ${usersTable} ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table ${users2Table} ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int + ) + `, + ); + + await db.execute( + sql` + create table ${citiesTable} ( + \`id\` serial primary key, + \`name\` text not null 
+ ) + `, + ); +}); + +test('select all fields', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('select sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select distinct', async () => { + const usersDistinctTable = singlestoreTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); +}); + +test('insert returning sql', async () => { + const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); +}); + +test('delete returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); +}); + +test('update returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); +}); + +test('update with returning all fields', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +test('update with returning partial', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' 
}]); +}); + +test('delete with returning all fields', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('delete with returning partial', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('insert + select', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('insert with overridden default values', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('insert many', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async () => { + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); +}); + +test('select with group by as field', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 
'Jane' }]); +}); + +test('select with group by as sql + column', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by as column + sql', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by complex query', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async () => { + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`${getTableName(usersTable)}\` group by \`${ + getTableName(usersTable) + }\`.\`id\`, \`${getTableName(usersTable)}\`.\`name\``, + params: [], + }); +}); + +test('build query insert with onDuplicate', async () => { + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: `insert into \`${ + getTableName(usersTable) + }\` (\`id\`, \`name\`, \`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('insert with onDuplicate', async () => { + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); +}); + +test('insert conflict', async () => { + await db.insert(usersTable) + .values({ name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); +}); + +test('insert conflict with ignore', async () => { + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert sql', async () => { + await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async () => { + const customerAlias = alias(usersTable, 
'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); +}); + +test('full join with alias', async () => { + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)) + .orderBy(asc(users.id)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from alias', async () => { + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .orderBy(asc(user.id)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async () => { + await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('prepared statement', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement reuse', async () => { + const stmt = db.insert(usersTable).values({ + verified: true, + id: sql.placeholder('id'), + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ id: i + 1, name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, 
name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('migrator', async () => { + const usersMigratorTable = singlestoreTableRaw('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), + }, (table) => { + return { + name: uniqueIndex('').on(table.name).using('btree'), + }; + }); + + await db.execute(sql.raw(`drop table if exists cities_migration`)); + await db.execute(sql.raw(`drop table if exists users_migration`)); + await db.execute(sql.raw(`drop table if exists users12`)); + await db.execute(sql.raw(`drop table if exists __drizzle_migrations`)); + + await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql.raw(`drop table cities_migration`)); + await db.execute(sql.raw(`drop table users_migration`)); + await db.execute(sql.raw(`drop table users12`)); + await db.execute(sql.raw(`drop table __drizzle_migrations`)); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${new Name(usersTable.id.name)}, ${new Name( + usersTable.name.name, + )}) values (1, ${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute( + db.insert(usersTable).values({ name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); +}); + +test('insert + select all possible dates', async () => { + const datesTable = singlestoreTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time'), + datetime: datetime('datetime'), + datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), + year: year('year'), + }); + + await db.execute(sql`drop table if exists ${datesTable}`); + await db.execute( + sql` + create table ${datesTable} ( + \`date\` date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`year\` year + ) + `, + ); + + const d = new Date('2022-11-11'); + + await db.insert(datesTable).values({ + date: d, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: d, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + 
dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + }]); + + await db.execute(sql`drop table ${datesTable}`); +}); + +test('SingleStore enum test case #1', async () => { + const tableWithEnums = singlestoreTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + await db.execute(sql`drop table if exists ${tableWithEnums}`); + + await db.execute(sql` + create table ${tableWithEnums} ( + \`id\` serial primary key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); + + await db.execute(sql`drop table ${tableWithEnums}`); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test('left join (flat object fields)', async () => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); +}); + +test('left join (grouped fields)', async () => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); +}); + +test('left join (all fields)', async () => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select().from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId: 1, + }, + cities: { + id: 1, + name: 'Paris', + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); +}); + +test('join 
subquery', async () => { + const coursesTable = singlestoreTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id'), + }); + + const courseCategoriesTable = singlestoreTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${coursesTable}`); + await db.execute(sql`drop table if exists ${courseCategoriesTable}`); + + await db.execute( + sql` + create table ${courseCategoriesTable} ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table ${coursesTable} ( + \`id\` serial primary key, + \`name\` text not null, + \`category_id\` int + ) + `, + ); + + await db.insert(courseCategoriesTable).values([ + { id: 1, name: 'Category 1' }, + { id: 2, name: 'Category 2' }, + { id: 3, name: 'Category 3' }, + { id: 4, name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { id: 1, name: 'Development', categoryId: 2 }, + { id: 2, name: 'IT & Software', categoryId: 3 }, + { id: 3, name: 'Marketing', categoryId: 4 }, + { id: 4, name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .orderBy(courseCategoriesTable.id) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + await db.execute(sql`drop table ${coursesTable}`); + await db.execute(sql`drop table ${courseCategoriesTable}`); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); +}); + +test('with ... 
select', async () => { + const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), + }); + + await db.execute(sql`drop table if exists ${orders}`); + await db.execute( + sql` + create table ${orders} ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + await db.execute(sql`drop table ${orders}`); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); +}); + +test('select from subquery sql', async () => { + await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) + .from(users2Table) + .orderBy(asc(users2Table.id)) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); +}); + +test('select a field without joining its table', () => { + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); +}); + +test('select all fields from subquery without alias', () => { + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); +}); + +test('select count()', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); +}); + +test('select for ...', () => { + { + const 
query = db.select().from(users2Table).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update no wait$/); + } +}); + +test('having', async () => { + await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { + id: 3, + name: 'New York', + }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { + id: 3, + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); +}); + +// TODO: Unskip when views are supported +/* test.skip('view', async () => { + const newYorkers1 = singlestoreView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); +}); */ + +test('select from raw sql', async () => { + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); +}); + +test('select from raw sql with joins', async () => { + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: 
sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from select', async () => { + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect>; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from with clause', async () => { + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect>; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('prefixed table', async () => { + const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); + + const users = singlestoreTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('orderBy with aliased column', () => { + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe(`select something as \`test\` from \`${getTableName(users2Table)}\` order by \`test\``); +}); + +test('timestamp timezone', async () => { + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await db.insert(usersTable).values({ id: 1, name: 'With default times' }); + await db.insert(usersTable).values({ + id: 2, + name: 'Without default times', + createdAt: date, + }); + const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); +}); + +test('transaction', async () => { + const users = 
singlestoreTable('users_transactions', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = singlestoreTable('products_transactions', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table ${users} (id serial not null primary key, balance int not null)`); + await db.execute( + sql`create table ${products} (id serial not null primary key, price int not null, stock int not null)`, + ); + + const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + + expect(result).toEqual([{ id: 1, balance: 90 }]); +}); + +test('transaction rollback', async () => { + const users = singlestoreTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, balance int not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + await db.execute(sql`drop table ${users}`); + + expect(result).toEqual([]); +}); + +test('join subquery with join', async () => { + const internalStaff = singlestoreTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = singlestoreTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = singlestoreTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table ${internalStaff} (user_id integer not null)`); + await db.execute(sql`create table ${customUser} (id integer not null)`); + await db.execute(sql`create table ${ticket} (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); + + 
expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); +}); + +// TODO: Unskip when views are supported +/* test.skip('subquery with view', async () => { + const users = singlestoreTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); +}); */ + +// TODO: Unskip when views are supported +/* test.skip('join view as subquery', async () => { + const users = singlestoreTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); +}); */ + +test('select iterator', async () => { + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const iter = db.select().from(users) + .orderBy(asc(users.id)) + .iterator(); + + const result: typeof users.$inferSelect[] = []; + + 
for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test('select iterator w/ prepared statement', async () => { + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const prepared = db.select().from(users) + .orderBy(asc(users.id)) + .prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test('insert undefined', async () => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); + +test('update undefined', async () => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); diff --git a/integration-tests/tests/singlestore/singlestore-proxy.test.ts b/integration-tests/tests/singlestore/singlestore-proxy.test.ts new file mode 100644 index 000000000..51dc48a4a --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-proxy.test.ts @@ -0,0 +1,140 @@ +import retry from 'async-retry'; +import type { SingleStoreRemoteDatabase } from 'drizzle-orm/singlestore-proxy'; +import { drizzle as proxyDrizzle } from 'drizzle-orm/singlestore-proxy'; +import * as mysql2 from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach } from 'vitest'; +import { skipTests } from '~/common'; +import { createDockerDB, tests } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: mysql2.Connection) {} + + async query(sql: string, params: any[], method: 'all' | 'execute') { + if (method === 'all') { + try { + const result = await this.db.query({ + sql, + values: params, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result[0] as any }; + } catch (e: any) { + return { error: e }; + } + } else if (method === 'execute') { + try { + const result = await this.db.query({ + sql, + values: params, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result as any }; + } catch (e: any) { 
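+				// Errors are returned rather than thrown so the proxy callback can rethrow them on the client side.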
+ return { error: e }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + async migrations(queries: string[]) { + await this.db.query('START TRANSACTION'); + try { + for (const query of queries) { + await this.db.query(query); + } + await this.db.query('COMMIT'); + } catch (e) { + await this.db.query('ROLLBACK'); + throw e; + } + + return {}; + } +} + +let db: SingleStoreRemoteDatabase; +let client: mysql2.Connection; +let serverSimulator: ServerSimulator; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { connectionString: conStr } = await createDockerDB(); + connectionString = conStr; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + + serverSimulator = new ServerSimulator(client); + db = proxyDrizzle(async (sql, params, method) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from singlestore proxy server:', e.message); + throw e; + } + }, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.singlestore = { + db, + }; +}); + +skipTests([ + 'select iterator w/ prepared statement', + 'select iterator', + 'nested transaction rollback', + 'nested transaction', + 'transaction rollback', + 'transaction', + 'transaction with options (set isolationLevel)', + 'migrator', +]); + +tests(); diff --git a/integration-tests/tests/singlestore/singlestore.test.ts b/integration-tests/tests/singlestore/singlestore.test.ts new file mode 100644 index 000000000..bfb1ee5b7 --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore.test.ts @@ -0,0 +1,51 @@ +import retry from 'async-retry'; +import { drizzle } from 'drizzle-orm/singlestore'; +import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +import * as mysql2 from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach } from 'vitest'; +import { createDockerDB, tests } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +let db: SingleStoreDriverDatabase; +let client: mysql2.Connection; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { connectionString: conStr } = await createDockerDB(); + connectionString = conStr; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.singlestore = { + db, + }; +}); + +tests(); diff --git 
a/integration-tests/vitest.config.ts b/integration-tests/vitest.config.ts index fcce0a362..a04281018 100644 --- a/integration-tests/vitest.config.ts +++ b/integration-tests/vitest.config.ts @@ -10,6 +10,7 @@ export default defineConfig({ 'tests/relational/**/*.test.ts', 'tests/pg/**/*.test.ts', 'tests/mysql/**/*.test.ts', + 'tests/singlestore/**/*.test.ts', 'tests/sqlite/**/*.test.ts', 'tests/replicas/**/*', 'tests/imports/**/*', @@ -58,6 +59,8 @@ export default defineConfig({ 'tests/sqlite/libsql-ws.test.ts', 'tests/sqlite/libsql-http.test.ts', 'tests/mysql/tidb-serverless.test.ts', + // waiting for json_array from singlestore team + 'tests/relational/singlestore.test.ts', ], typecheck: { tsconfig: 'tsconfig.json', diff --git a/package.json b/package.json index d52a568a6..6b4715b0e 100755 --- a/package.json +++ b/package.json @@ -9,7 +9,8 @@ "test": "turbo run test --color", "t": "pnpm test", "test:types": "turbo run test:types --color", - "lint": "concurrently -n eslint,dprint \"eslint --ext ts .\" \"dprint check --list-different\"" + "lint": "dprint check --list-different", + "lint:fix": "dprint fmt" }, "devDependencies": { "@arethetypeswrong/cli": "0.15.3",