summaryrefslogtreecommitdiff
path: root/packages/db/src
diff options
context:
space:
mode:
Diffstat (limited to 'packages/db/src')
-rw-r--r--packages/db/src/core/cli/commands/execute/index.ts70
-rw-r--r--packages/db/src/core/cli/commands/link/index.ts295
-rw-r--r--packages/db/src/core/cli/commands/login/index.ts96
-rw-r--r--packages/db/src/core/cli/commands/logout/index.ts7
-rw-r--r--packages/db/src/core/cli/commands/push/index.ts173
-rw-r--r--packages/db/src/core/cli/commands/shell/index.ts48
-rw-r--r--packages/db/src/core/cli/commands/verify/index.ts58
-rw-r--r--packages/db/src/core/cli/index.ts82
-rw-r--r--packages/db/src/core/cli/migration-queries.ts539
-rw-r--r--packages/db/src/core/cli/print-help.ts69
-rw-r--r--packages/db/src/core/cli/types.ts5
-rw-r--r--packages/db/src/core/consts.ts19
-rw-r--r--packages/db/src/core/errors.ts50
-rw-r--r--packages/db/src/core/integration/error-map.ts104
-rw-r--r--packages/db/src/core/integration/file-url.ts95
-rw-r--r--packages/db/src/core/integration/index.ts236
-rw-r--r--packages/db/src/core/integration/typegen.ts26
-rw-r--r--packages/db/src/core/integration/vite-plugin-db.ts238
-rw-r--r--packages/db/src/core/load-file.ts206
-rw-r--r--packages/db/src/core/queries.ts206
-rw-r--r--packages/db/src/core/schemas.ts247
-rw-r--r--packages/db/src/core/types.ts102
-rw-r--r--packages/db/src/core/utils.ts80
-rw-r--r--packages/db/src/index.ts3
-rw-r--r--packages/db/src/runtime/db-client.ts254
-rw-r--r--packages/db/src/runtime/errors.ts29
-rw-r--r--packages/db/src/runtime/index.ts157
-rw-r--r--packages/db/src/runtime/types.ts109
-rw-r--r--packages/db/src/runtime/utils.ts71
-rw-r--r--packages/db/src/runtime/virtual.ts88
-rw-r--r--packages/db/src/utils.ts14
31 files changed, 3776 insertions, 0 deletions
diff --git a/packages/db/src/core/cli/commands/execute/index.ts b/packages/db/src/core/cli/commands/execute/index.ts
new file mode 100644
index 000000000..053736291
--- /dev/null
+++ b/packages/db/src/core/cli/commands/execute/index.ts
@@ -0,0 +1,70 @@
+import { existsSync } from 'node:fs';
+import type { AstroConfig } from 'astro';
+import { green } from 'kleur/colors';
+import type { Arguments } from 'yargs-parser';
+import { isDbError } from '../../../../runtime/utils.js';
+import {
+ EXEC_DEFAULT_EXPORT_ERROR,
+ EXEC_ERROR,
+ FILE_NOT_FOUND_ERROR,
+ MISSING_EXECUTE_PATH_ERROR,
+} from '../../../errors.js';
+import {
+ getLocalVirtualModContents,
+ getStudioVirtualModContents,
+} from '../../../integration/vite-plugin-db.js';
+import { bundleFile, importBundledFile } from '../../../load-file.js';
+import type { DBConfig } from '../../../types.js';
+import { getManagedRemoteToken } from '../../../utils.js';
+
+export async function cmd({
+ astroConfig,
+ dbConfig,
+ flags,
+}: {
+ astroConfig: AstroConfig;
+ dbConfig: DBConfig;
+ flags: Arguments;
+}) {
+ const filePath = flags._[4];
+ if (typeof filePath !== 'string') {
+ console.error(MISSING_EXECUTE_PATH_ERROR);
+ process.exit(1);
+ }
+
+ const fileUrl = new URL(filePath, astroConfig.root);
+ if (!existsSync(fileUrl)) {
+ console.error(FILE_NOT_FOUND_ERROR(filePath));
+ process.exit(1);
+ }
+
+ let virtualModContents: string;
+ if (flags.remote) {
+ const appToken = await getManagedRemoteToken(flags.token);
+ virtualModContents = getStudioVirtualModContents({
+ tables: dbConfig.tables ?? {},
+ appToken: appToken.token,
+ isBuild: false,
+ output: 'server',
+ });
+ } else {
+ virtualModContents = getLocalVirtualModContents({
+ tables: dbConfig.tables ?? {},
+ root: astroConfig.root,
+ });
+ }
+ const { code } = await bundleFile({ virtualModContents, root: astroConfig.root, fileUrl });
+
+ const mod = await importBundledFile({ code, root: astroConfig.root });
+ if (typeof mod.default !== 'function') {
+ console.error(EXEC_DEFAULT_EXPORT_ERROR(filePath));
+ process.exit(1);
+ }
+ try {
+ await mod.default();
+ console.info(`${green('✔')} File run successfully.`);
+ } catch (e) {
+ if (isDbError(e)) throw new Error(EXEC_ERROR(e.message));
+ else throw e;
+ }
+}
diff --git a/packages/db/src/core/cli/commands/link/index.ts b/packages/db/src/core/cli/commands/link/index.ts
new file mode 100644
index 000000000..bd07b7824
--- /dev/null
+++ b/packages/db/src/core/cli/commands/link/index.ts
@@ -0,0 +1,295 @@
+import { mkdir, writeFile } from 'node:fs/promises';
+import { homedir } from 'node:os';
+import { basename } from 'node:path';
+import {
+ MISSING_SESSION_ID_ERROR,
+ PROJECT_ID_FILE,
+ getAstroStudioUrl,
+ getSessionIdFromFile,
+} from '@astrojs/studio';
+import { slug } from 'github-slugger';
+import { bgRed, cyan } from 'kleur/colors';
+import prompts from 'prompts';
+import yoctoSpinner from 'yocto-spinner';
+import { safeFetch } from '../../../../runtime/utils.js';
+import type { Result } from '../../../utils.js';
+
/**
 * `astro link` command. Associates the current directory with an Astro
 * Studio project by writing the project id to the local project id file.
 * Prompts the user to link an existing project or create a new one.
 * Exits the process when no session token is found or the user cancels.
 */
export async function cmd() {
	const sessionToken = await getSessionIdFromFile();
	if (!sessionToken) {
		console.error(MISSING_SESSION_ID_ERROR);
		process.exit(1);
	}
	await promptBegin();
	const isLinkExisting = await promptLinkExisting();
	if (isLinkExisting) {
		const workspaceId = await promptWorkspace(sessionToken);
		const existingProjectData = await promptExistingProjectName({ workspaceId });
		return await linkProject(existingProjectData.id);
	}

	// promptLinkNew() exits the process when the user declines, so the
	// branch below only runs on a "yes" answer.
	const isLinkNew = await promptLinkNew();
	if (isLinkNew) {
		const workspaceId = await promptWorkspace(sessionToken);
		const newProjectName = await promptNewProjectName();
		const newProjectRegion = await promptNewProjectRegion();
		const spinner = yoctoSpinner({ text: 'Creating new project...' }).start();
		const newProjectData = await createNewProject({
			workspaceId,
			name: newProjectName,
			region: newProjectRegion,
		});
		// TODO(fks): Actually listen for project creation before continuing
		// This is just a dumb spinner that roughly matches database creation time.
		await new Promise((r) => setTimeout(r, 4000));
		spinner.success('Project created!');
		return await linkProject(newProjectData.id);
	}
}
+
+async function linkProject(id: string) {
+ await mkdir(new URL('.', PROJECT_ID_FILE), { recursive: true });
+ await writeFile(PROJECT_ID_FILE, `${id}`);
+ console.info('Project linked.');
+}
+
/**
 * Fetch the list of workspaces the logged-in user belongs to from the
 * Studio API.
 * @throws When the request is unauthorized (401), fails with any other
 * status, or the API reports an unsuccessful result.
 */
async function getWorkspaces(sessionToken: string) {
	const linkUrl = new URL(getAstroStudioUrl() + '/api/cli/workspaces.list');
	const response = await safeFetch(
		linkUrl,
		{
			method: 'POST',
			headers: {
				Authorization: `Bearer ${sessionToken}`,
				'Content-Type': 'application/json',
			},
		},
		(res) => {
			// Unauthorized
			if (res.status === 401) {
				throw new Error(
					`${bgRed('Unauthorized')}\n\n Are you logged in?\n Run ${cyan(
						'astro login',
					)} to authenticate and then try linking again.\n\n`,
				);
			}
			// Any other non-OK status surfaces as a generic fetch failure.
			throw new Error(`Failed to fetch user workspace: ${res.status} ${res.statusText}`);
		},
	);

	const { data, success } = (await response.json()) as Result<{ id: string; name: string }[]>;
	if (!success) {
		throw new Error(`Failed to fetch user's workspace.`);
	}
	return data;
}
+
+/**
+ * Get the workspace ID to link to.
+ * Prompts the user to choose if they have more than one workspace in Astro Studio.
+ * @returns A `Promise` for the workspace ID to use.
+ */
+async function promptWorkspace(sessionToken: string) {
+ const workspaces = await getWorkspaces(sessionToken);
+ if (workspaces.length === 0) {
+ console.error('No workspaces found.');
+ process.exit(1);
+ }
+
+ if (workspaces.length === 1) {
+ return workspaces[0].id;
+ }
+
+ const { workspaceId } = await prompts({
+ type: 'autocomplete',
+ name: 'workspaceId',
+ message: 'Select your workspace:',
+ limit: 5,
+ choices: workspaces.map((w) => ({ title: w.name, value: w.id })),
+ });
+ if (typeof workspaceId !== 'string') {
+ console.log('Canceled.');
+ process.exit(0);
+ }
+ return workspaceId;
+}
+
/**
 * Create a new project in the given workspace via the Studio API.
 * Exits the process on auth failure, fetch failure, or an unsuccessful
 * API result.
 * @returns The new project's `{ id, idName }`.
 */
export async function createNewProject({
	workspaceId,
	name,
	region,
}: {
	workspaceId: string;
	name: string;
	region: string;
}) {
	const linkUrl = new URL(getAstroStudioUrl() + '/api/cli/projects.create');
	const response = await safeFetch(
		linkUrl,
		{
			method: 'POST',
			headers: {
				Authorization: `Bearer ${await getSessionIdFromFile()}`,
				'Content-Type': 'application/json',
			},
			body: JSON.stringify({ workspaceId, name, region }),
		},
		(res) => {
			// Unauthorized
			if (res.status === 401) {
				console.error(
					`${bgRed('Unauthorized')}\n\n Are you logged in?\n Run ${cyan(
						'astro login',
					)} to authenticate and then try linking again.\n\n`,
				);
				process.exit(1);
			}
			console.error(`Failed to create project: ${res.status} ${res.statusText}`);
			process.exit(1);
		},
	);

	const { data, success } = (await response.json()) as Result<{ id: string; idName: string }>;
	if (!success) {
		console.error(`Failed to create project.`);
		process.exit(1);
	}
	return { id: data.id, idName: data.idName };
}
+
+export async function promptExistingProjectName({ workspaceId }: { workspaceId: string }) {
+ const linkUrl = new URL(getAstroStudioUrl() + '/api/cli/projects.list');
+ const response = await safeFetch(
+ linkUrl,
+ {
+ method: 'POST',
+ headers: {
+ Authorization: `Bearer ${await getSessionIdFromFile()}`,
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({ workspaceId }),
+ },
+ (res) => {
+ if (res.status === 401) {
+ console.error(
+ `${bgRed('Unauthorized')}\n\n Are you logged in?\n Run ${cyan(
+ 'astro login',
+ )} to authenticate and then try linking again.\n\n`,
+ );
+ process.exit(1);
+ }
+ console.error(`Failed to fetch projects: ${res.status} ${res.statusText}`);
+ process.exit(1);
+ },
+ );
+
+ const { data, success } = (await response.json()) as Result<
+ { id: string; name: string; idName: string }[]
+ >;
+ if (!success) {
+ console.error(`Failed to fetch projects.`);
+ process.exit(1);
+ }
+ const { projectId } = await prompts({
+ type: 'autocomplete',
+ name: 'projectId',
+ message: 'What is your project name?',
+ limit: 5,
+ choices: data.map((p) => ({ title: p.name, value: p.id })),
+ });
+ if (typeof projectId !== 'string') {
+ console.log('Canceled.');
+ process.exit(0);
+ }
+ const selectedProjectData = data.find((p: any) => p.id === projectId)!;
+ return selectedProjectData;
+}
+
+export async function promptBegin(): Promise<void> {
+ // Get the current working directory relative to the user's home directory
+ const prettyCwd = process.cwd().replace(homedir(), '~');
+
+ // prompt
+ const { begin } = await prompts({
+ type: 'confirm',
+ name: 'begin',
+ message: `Link "${prettyCwd}" with Astro Studio?`,
+ initial: true,
+ });
+ if (!begin) {
+ console.log('Canceled.');
+ process.exit(0);
+ }
+}
+
+/**
+ * Ask the user if they want to link to an existing Astro Studio project.
+ * @returns A `Promise` for the user’s answer: `true` if they answer yes, otherwise `false`.
+ */
+export async function promptLinkExisting(): Promise<boolean> {
+ // prompt
+ const { linkExisting } = await prompts({
+ type: 'confirm',
+ name: 'linkExisting',
+ message: `Link with an existing project in Astro Studio?`,
+ initial: true,
+ });
+ return !!linkExisting;
+}
+
+/**
+ * Ask the user if they want to link to a new Astro Studio Project.
+ * **Exits the process if they answer no.**
+ * @returns A `Promise` for the user’s answer: `true` if they answer yes.
+ */
+export async function promptLinkNew(): Promise<boolean> {
+ // prompt
+ const { linkNew } = await prompts({
+ type: 'confirm',
+ name: 'linkNew',
+ message: `Create a new project in Astro Studio?`,
+ initial: true,
+ });
+ if (!linkNew) {
+ console.log('Canceled.');
+ process.exit(0);
+ }
+ return true;
+}
+
+export async function promptNewProjectName(): Promise<string> {
+ const { newProjectName } = await prompts({
+ type: 'text',
+ name: 'newProjectName',
+ message: `What is your new project's name?`,
+ initial: basename(process.cwd()),
+ format: (val) => slug(val),
+ });
+ if (!newProjectName) {
+ console.log('Canceled.');
+ process.exit(0);
+ }
+ return newProjectName;
+}
+
+export async function promptNewProjectRegion(): Promise<string> {
+ const { newProjectRegion } = await prompts({
+ type: 'select',
+ name: 'newProjectRegion',
+ message: `Where should your new database live?`,
+ choices: [
+ { title: 'North America (East)', value: 'NorthAmericaEast' },
+ { title: 'North America (West)', value: 'NorthAmericaWest' },
+ { title: 'Europe (Amsterdam)', value: 'EuropeCentral' },
+ { title: 'South America (Brazil)', value: 'SouthAmericaEast' },
+ { title: 'Asia (India)', value: 'AsiaSouth' },
+ { title: 'Asia (Japan)', value: 'AsiaNorthEast' },
+ ],
+ initial: 0,
+ });
+ if (!newProjectRegion) {
+ console.log('Canceled.');
+ process.exit(0);
+ }
+ return newProjectRegion;
+}
diff --git a/packages/db/src/core/cli/commands/login/index.ts b/packages/db/src/core/cli/commands/login/index.ts
new file mode 100644
index 000000000..0b0979384
--- /dev/null
+++ b/packages/db/src/core/cli/commands/login/index.ts
@@ -0,0 +1,96 @@
+import { mkdir, writeFile } from 'node:fs/promises';
+import { createServer as _createServer } from 'node:http';
+import { SESSION_LOGIN_FILE, getAstroStudioUrl } from '@astrojs/studio';
+import type { AstroConfig } from 'astro';
+import { listen } from 'async-listen';
+import { cyan } from 'kleur/colors';
+import open from 'open';
+import prompt from 'prompts';
+import type { Arguments } from 'yargs-parser';
+import yoctoSpinner from 'yocto-spinner';
+import type { DBConfig } from '../../../types.js';
+
// True when running inside a browser-hosted dev environment, where the
// localhost callback server used by the normal login flow is not reachable.
const isWebContainer =
	// Stackblitz heuristic
	process.versions?.webcontainer ??
	// GitHub Codespaces heuristic
	process.env.CODESPACE_NAME;
+
+export async function cmd({
+ flags,
+}: {
+ astroConfig: AstroConfig;
+ dbConfig: DBConfig;
+ flags: Arguments;
+}) {
+ let session = flags.session;
+
+ if (!session && isWebContainer) {
+ console.log(`Please visit the following URL in your web browser:`);
+ console.log(cyan(`${getAstroStudioUrl()}/auth/cli/login`));
+ console.log(`After login in complete, enter the verification code displayed:`);
+ const response = await prompt({
+ type: 'text',
+ name: 'session',
+ message: 'Verification code:',
+ });
+ if (!response.session) {
+ console.error('Cancelling login.');
+ process.exit(0);
+ }
+ session = response.session;
+ console.log('Successfully logged in');
+ } else if (!session) {
+ const { url, promise } = await createServer();
+ const loginUrl = new URL('/auth/cli/login', getAstroStudioUrl());
+ loginUrl.searchParams.set('returnTo', url);
+ console.log(`Opening the following URL in your browser...`);
+ console.log(cyan(loginUrl.href));
+ console.log(`If something goes wrong, copy-and-paste the URL into your browser.`);
+ open(loginUrl.href);
+ const spinner = yoctoSpinner({ text: 'Waiting for confirmation...' });
+ session = await promise;
+ spinner.success('Successfully logged in');
+ }
+
+ await mkdir(new URL('.', SESSION_LOGIN_FILE), { recursive: true });
+ await writeFile(SESSION_LOGIN_FILE, `${session}`);
+}
+
// NOTE(fks): How the Astro CLI login process works:
// 1. The Astro CLI creates a temporary server to listen for the session token
// 2. The user is directed to studio.astro.build/ to login
// 3. The user is redirected back to the temporary server with their session token
// 4. The temporary server receives and saves the session token, logging the user in
// 5. The user is redirected one last time to a success/failure page
async function createServer(): Promise<{ url: string; promise: Promise<string> }> {
	// These are assigned synchronously by the Promise executor below,
	// before the server can receive any request.
	let resolve: (value: string | PromiseLike<string>) => void, reject: (reason?: Error) => void;

	const server = _createServer((req, res) => {
		// Handle the request
		const url = new URL(req.url ?? '/', `http://${req.headers.host}`);
		const sessionParam = url.searchParams.get('session');
		// Handle the response & resolve the promise
		res.statusCode = 302;
		if (!sessionParam) {
			res.setHeader('location', getAstroStudioUrl() + '/auth/cli/error');
			reject(new Error('Failed to log in'));
		} else {
			res.setHeader('location', getAstroStudioUrl() + '/auth/cli/success');
			resolve(sessionParam);
		}
		res.end();
	});

	// Port 0 lets the OS pick a free port; bind to loopback only.
	const { port } = await listen(server, 0, '127.0.0.1');
	const serverUrl = `http://localhost:${port}`;
	const sessionPromise = new Promise<string>((_resolve, _reject) => {
		resolve = _resolve;
		reject = _reject;
	}).finally(() => {
		// Tear the server down whether login succeeded or failed.
		server.closeAllConnections();
		server.close();
	});

	return { url: serverUrl, promise: sessionPromise };
}
diff --git a/packages/db/src/core/cli/commands/logout/index.ts b/packages/db/src/core/cli/commands/logout/index.ts
new file mode 100644
index 000000000..8b7878659
--- /dev/null
+++ b/packages/db/src/core/cli/commands/logout/index.ts
@@ -0,0 +1,7 @@
+import { unlink } from 'node:fs/promises';
+import { SESSION_LOGIN_FILE } from '@astrojs/studio';
+
+export async function cmd() {
+ await unlink(SESSION_LOGIN_FILE);
+ console.log('Successfully logged out of Astro Studio.');
+}
diff --git a/packages/db/src/core/cli/commands/push/index.ts b/packages/db/src/core/cli/commands/push/index.ts
new file mode 100644
index 000000000..590d4f06e
--- /dev/null
+++ b/packages/db/src/core/cli/commands/push/index.ts
@@ -0,0 +1,173 @@
+import type { AstroConfig } from 'astro';
+import { sql } from 'drizzle-orm';
+import prompts from 'prompts';
+import type { Arguments } from 'yargs-parser';
+import { createRemoteDatabaseClient } from '../../../../runtime/index.js';
+import { safeFetch } from '../../../../runtime/utils.js';
+import { MIGRATION_VERSION } from '../../../consts.js';
+import type { DBConfig, DBSnapshot } from '../../../types.js';
+import {
+ type RemoteDatabaseInfo,
+ type Result,
+ getManagedRemoteToken,
+ getRemoteDatabaseInfo,
+} from '../../../utils.js';
+import {
+ createCurrentSnapshot,
+ createEmptySnapshot,
+ formatDataLossMessage,
+ getMigrationQueries,
+ getProductionCurrentSnapshot,
+} from '../../migration-queries.js';
+
/**
 * `astro db push` command. Diffs the local schema config against the
 * remote snapshot and applies the resulting migration queries.
 * `--dry-run` prints the statements instead of applying them;
 * `--force-reset` drops and recreates everything after confirmation.
 */
export async function cmd({
	dbConfig,
	flags,
}: {
	astroConfig: AstroConfig;
	dbConfig: DBConfig;
	flags: Arguments;
}) {
	const isDryRun = flags.dryRun;
	const isForceReset = flags.forceReset;
	const dbInfo = getRemoteDatabaseInfo();
	const appToken = await getManagedRemoteToken(flags.token, dbInfo);
	const productionSnapshot = await getProductionCurrentSnapshot({
		dbInfo,
		appToken: appToken.token,
	});
	const currentSnapshot = createCurrentSnapshot(dbConfig);
	// No snapshot stored remotely means this is the first push:
	// diff against an empty schema.
	const isFromScratch = !productionSnapshot;
	const { queries: migrationQueries, confirmations } = await getMigrationQueries({
		oldSnapshot: isFromScratch ? createEmptySnapshot() : productionSnapshot,
		newSnapshot: currentSnapshot,
		reset: isForceReset,
	});

	// Push the database schema.
	if (migrationQueries.length === 0) {
		console.log('Database schema is up to date.');
	} else {
		console.log(`Database schema is out of date.`);
	}

	if (isForceReset) {
		// Destructive path: require explicit confirmation before erasing data.
		const { begin } = await prompts({
			type: 'confirm',
			name: 'begin',
			message: `Reset your database? All of your data will be erased and your schema created from scratch.`,
			initial: false,
		});

		if (!begin) {
			console.log('Canceled.');
			process.exit(0);
		}

		console.log(`Force-pushing to the database. All existing data will be erased.`);
	} else if (confirmations.length > 0) {
		// Potentially lossy changes require --force-reset; bail out.
		console.log('\n' + formatDataLossMessage(confirmations) + '\n');
		throw new Error('Exiting.');
	}

	if (isDryRun) {
		console.log('Statements:', JSON.stringify(migrationQueries, undefined, 2));
	} else {
		console.log(`Pushing database schema updates...`);
		await pushSchema({
			statements: migrationQueries,
			dbInfo,
			appToken: appToken.token,
			isDryRun,
			currentSnapshot: currentSnapshot,
		});
	}
	// cleanup and exit
	await appToken.destroy();
	console.info('Push complete!');
}
+
/**
 * Apply schema migration statements plus the new snapshot to the remote
 * database — over HTTP for Studio-hosted databases, or directly for libSQL.
 * NOTE(review): the `isDryRun` branch here looks unreachable from `cmd`,
 * which only calls this function when not in dry-run mode — confirm.
 */
async function pushSchema({
	statements,
	dbInfo,
	appToken,
	isDryRun,
	currentSnapshot,
}: {
	statements: string[];
	dbInfo: RemoteDatabaseInfo;
	appToken: string;
	isDryRun: boolean;
	currentSnapshot: DBSnapshot;
}) {
	const requestBody: RequestBody = {
		snapshot: currentSnapshot,
		sql: statements,
		version: MIGRATION_VERSION,
	};
	if (isDryRun) {
		console.info('[DRY RUN] Batch query:', JSON.stringify(requestBody, null, 2));
		return new Response(null, { status: 200 });
	}

	return dbInfo.type === 'studio'
		? pushToStudio(requestBody, appToken, dbInfo.url)
		: pushToDb(requestBody, appToken, dbInfo.url);
}
+
/** Payload sent to the remote database when pushing schema updates. */
type RequestBody = {
	// New schema snapshot, stored remotely so future pushes can diff against it.
	snapshot: DBSnapshot;
	// Ordered SQL migration statements to execute.
	sql: string[];
	// Migration format version (MIGRATION_VERSION).
	version: string;
};
+
/**
 * Apply migration statements directly to a remote libSQL database, then
 * record the new snapshot in `_astro_db_snapshot` so future pushes can
 * diff against it. The statements and the snapshot insert run in one
 * transaction; the snapshot table itself is created (if needed) first.
 */
async function pushToDb(requestBody: RequestBody, appToken: string, remoteUrl: string) {
	const client = createRemoteDatabaseClient({
		dbType: 'libsql',
		appToken,
		remoteUrl,
	});

	await client.run(sql`create table if not exists _astro_db_snapshot (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		version TEXT,
		snapshot BLOB
	);`);

	await client.transaction(async (tx) => {
		// Run each migration statement in order within the transaction.
		for (const stmt of requestBody.sql) {
			await tx.run(sql.raw(stmt));
		}

		await tx.run(sql`insert into _astro_db_snapshot (version, snapshot) values (
			${requestBody.version},
			${JSON.stringify(requestBody.snapshot)}
		)`);
	});
}
+
+async function pushToStudio(requestBody: RequestBody, appToken: string, remoteUrl: string) {
+ const url = new URL('/db/push', remoteUrl);
+ const response = await safeFetch(
+ url,
+ {
+ method: 'POST',
+ headers: new Headers({
+ Authorization: `Bearer ${appToken}`,
+ }),
+ body: JSON.stringify(requestBody),
+ },
+ async (res) => {
+ console.error(`${url.toString()} failed: ${res.status} ${res.statusText}`);
+ console.error(await res.text());
+ throw new Error(`/db/push fetch failed: ${res.status} ${res.statusText}`);
+ },
+ );
+
+ const result = (await response.json()) as Result<never>;
+ if (!result.success) {
+ console.error(`${url.toString()} unsuccessful`);
+ console.error(await response.text());
+ throw new Error(`/db/push fetch unsuccessful`);
+ }
+}
diff --git a/packages/db/src/core/cli/commands/shell/index.ts b/packages/db/src/core/cli/commands/shell/index.ts
new file mode 100644
index 000000000..dcc54fc70
--- /dev/null
+++ b/packages/db/src/core/cli/commands/shell/index.ts
@@ -0,0 +1,48 @@
+import type { AstroConfig } from 'astro';
+import { sql } from 'drizzle-orm';
+import type { Arguments } from 'yargs-parser';
+import {
+ createLocalDatabaseClient,
+ createRemoteDatabaseClient,
+} from '../../../../runtime/db-client.js';
+import { normalizeDatabaseUrl } from '../../../../runtime/index.js';
+import { DB_PATH } from '../../../consts.js';
+import { SHELL_QUERY_MISSING_ERROR } from '../../../errors.js';
+import type { DBConfigInput } from '../../../types.js';
+import { getAstroEnv, getManagedRemoteToken, getRemoteDatabaseInfo } from '../../../utils.js';
+
/**
 * `astro db shell --query <sql-string>` command. Executes a raw SQL
 * string against the remote database (`--remote`) or the local dev
 * database file, and prints the result.
 */
export async function cmd({
	flags,
	astroConfig,
}: {
	dbConfig: DBConfigInput;
	astroConfig: AstroConfig;
	flags: Arguments;
}) {
	const query = flags.query;
	if (!query) {
		console.error(SHELL_QUERY_MISSING_ERROR);
		process.exit(1);
	}
	const dbInfo = getRemoteDatabaseInfo();
	if (flags.remote) {
		const appToken = await getManagedRemoteToken(flags.token, dbInfo);
		const db = createRemoteDatabaseClient({
			dbType: dbInfo.type,
			remoteUrl: dbInfo.url,
			appToken: appToken.token,
		});
		const result = await db.run(sql.raw(query));
		// Release the managed token before printing the result.
		await appToken.destroy();
		console.log(result);
	} else {
		// ASTRO_DATABASE_FILE may override the default local database path.
		const { ASTRO_DATABASE_FILE } = getAstroEnv();
		const dbUrl = normalizeDatabaseUrl(
			ASTRO_DATABASE_FILE,
			new URL(DB_PATH, astroConfig.root).href,
		);
		const db = createLocalDatabaseClient({ dbUrl, enableTransactions: dbInfo.type === 'libsql' });
		const result = await db.run(sql.raw(query));
		console.log(result);
	}
}
diff --git a/packages/db/src/core/cli/commands/verify/index.ts b/packages/db/src/core/cli/commands/verify/index.ts
new file mode 100644
index 000000000..35f489a80
--- /dev/null
+++ b/packages/db/src/core/cli/commands/verify/index.ts
@@ -0,0 +1,58 @@
+import type { AstroConfig } from 'astro';
+import type { Arguments } from 'yargs-parser';
+import type { DBConfig } from '../../../types.js';
+import { getManagedRemoteToken, getRemoteDatabaseInfo } from '../../../utils.js';
+import {
+ createCurrentSnapshot,
+ createEmptySnapshot,
+ formatDataLossMessage,
+ getMigrationQueries,
+ getProductionCurrentSnapshot,
+} from '../../migration-queries.js';
+
/**
 * `astro db verify` command. Diffs the local schema config against the
 * remote snapshot and reports whether a push is needed, without applying
 * anything. Exits non-zero when pending changes would cause data loss.
 * `--json` emits a machine-readable result (useful in CI).
 */
export async function cmd({
	dbConfig,
	flags,
}: {
	astroConfig: AstroConfig;
	dbConfig: DBConfig;
	flags: Arguments;
}) {
	const isJson = flags.json;
	const dbInfo = getRemoteDatabaseInfo();
	const appToken = await getManagedRemoteToken(flags.token, dbInfo);
	const productionSnapshot = await getProductionCurrentSnapshot({
		dbInfo,
		appToken: appToken.token,
	});
	const currentSnapshot = createCurrentSnapshot(dbConfig);
	// No remote snapshot yet (fresh database): diff against an empty schema.
	const { queries: migrationQueries, confirmations } = await getMigrationQueries({
		oldSnapshot: productionSnapshot || createEmptySnapshot(),
		newSnapshot: currentSnapshot,
	});

	const result = { exitCode: 0, message: '', code: '', data: undefined as unknown };
	if (migrationQueries.length === 0) {
		result.code = 'MATCH';
		result.message = `Database schema is up to date.`;
	} else {
		result.code = 'NO_MATCH';
		result.message = `Database schema is out of date.\nRun 'astro db push' to push up your latest changes.`;
	}

	// Potential data loss overrides MATCH/NO_MATCH and fails the run.
	if (confirmations.length > 0) {
		result.code = 'DATA_LOSS';
		result.exitCode = 1;
		result.data = confirmations;
		// Second arg presumably toggles colored output off for JSON — confirm.
		result.message = formatDataLossMessage(confirmations, !isJson);
	}

	if (isJson) {
		console.log(JSON.stringify(result));
	} else {
		console.log(result.message);
	}

	await appToken.destroy();
	process.exit(result.exitCode);
}
diff --git a/packages/db/src/core/cli/index.ts b/packages/db/src/core/cli/index.ts
new file mode 100644
index 000000000..531b016a6
--- /dev/null
+++ b/packages/db/src/core/cli/index.ts
@@ -0,0 +1,82 @@
+import type { AstroConfig } from 'astro';
+import type { Arguments } from 'yargs-parser';
+import { resolveDbConfig } from '../load-file.js';
+import { printHelp } from './print-help.js';
+
/**
 * Entry point for the `astro db` (plus login/logout/link) CLI commands.
 * Resolves the db config once, then dispatches to the matching
 * subcommand module, loaded lazily via dynamic import.
 */
export async function cli({
	flags,
	config: astroConfig,
}: {
	flags: Arguments;
	config: AstroConfig;
}) {
	const args = flags._ as string[];
	// Most commands are `astro db foo`, but for now login/logout
	// are also handled by this package, so first check if this is a db command.
	const command = args[2] === 'db' ? args[3] : args[2];
	const { dbConfig } = await resolveDbConfig(astroConfig);

	switch (command) {
		case 'shell': {
			const { cmd } = await import('./commands/shell/index.js');
			return await cmd({ astroConfig, dbConfig, flags });
		}
		// 'gen' and 'sync' are retired no-ops kept for backwards compatibility.
		case 'gen': {
			console.log('"astro db gen" is no longer needed! Visit the docs for more information.');
			return;
		}
		case 'sync': {
			console.log('"astro db sync" is no longer needed! Visit the docs for more information.');
			return;
		}
		case 'push': {
			const { cmd } = await import('./commands/push/index.js');
			return await cmd({ astroConfig, dbConfig, flags });
		}
		case 'verify': {
			const { cmd } = await import('./commands/verify/index.js');
			return await cmd({ astroConfig, dbConfig, flags });
		}
		case 'execute': {
			const { cmd } = await import('./commands/execute/index.js');
			return await cmd({ astroConfig, dbConfig, flags });
		}
		case 'login': {
			const { cmd } = await import('./commands/login/index.js');
			return await cmd({ astroConfig, dbConfig, flags });
		}
		case 'logout': {
			const { cmd } = await import('./commands/logout/index.js');
			return await cmd();
		}
		case 'link': {
			const { cmd } = await import('./commands/link/index.js');
			return await cmd();
		}
		default: {
			// Unknown (or missing) command: print an error if one was given,
			// then always fall through to the help text.
			if (command != null) {
				console.error(`Unknown command: ${command}`);
			}
			printHelp({
				commandName: 'astro db',
				usage: '[command] [...flags]',
				headline: ' ',
				tables: {
					Commands: [
						['push', 'Push table schema updates to Astro Studio.'],
						['verify', 'Test schema updates /w Astro Studio (good for CI).'],
						[
							'astro db execute <file-path>',
							'Execute a ts/js file using astro:db. Use --remote to connect to Studio.',
						],
						[
							'astro db shell --query <sql-string>',
							'Execute a SQL string. Use --remote to connect to Studio.',
						],
					],
				},
			});
			return;
		}
	}
}
diff --git a/packages/db/src/core/cli/migration-queries.ts b/packages/db/src/core/cli/migration-queries.ts
new file mode 100644
index 000000000..db3972d09
--- /dev/null
+++ b/packages/db/src/core/cli/migration-queries.ts
@@ -0,0 +1,539 @@
+import { stripVTControlCharacters } from 'node:util';
+import deepDiff from 'deep-diff';
+import { sql } from 'drizzle-orm';
+import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
+import * as color from 'kleur/colors';
+import { customAlphabet } from 'nanoid';
+import { hasPrimaryKey } from '../../runtime/index.js';
+import { createRemoteDatabaseClient } from '../../runtime/index.js';
+import { isSerializedSQL } from '../../runtime/types.js';
+import { isDbError, safeFetch } from '../../runtime/utils.js';
+import { MIGRATION_VERSION } from '../consts.js';
+import { RENAME_COLUMN_ERROR, RENAME_TABLE_ERROR } from '../errors.js';
+import {
+ getCreateIndexQueries,
+ getCreateTableQuery,
+ getDropTableIfExistsQuery,
+ getModifiers,
+ getReferencesConfig,
+ hasDefault,
+ schemaTypeToSqlType,
+} from '../queries.js';
+import { columnSchema } from '../schemas.js';
+import type {
+ BooleanColumn,
+ ColumnType,
+ DBColumn,
+ DBColumns,
+ DBConfig,
+ DBSnapshot,
+ DateColumn,
+ JsonColumn,
+ NumberColumn,
+ ResolvedDBTable,
+ ResolvedDBTables,
+ ResolvedIndexes,
+ TextColumn,
+} from '../types.js';
+import type { RemoteDatabaseInfo, Result } from '../utils.js';
+
// Shared dialect instance, used only for identifier escaping in generated SQL.
const sqlite = new SQLiteAsyncDialect();
// Generates a 10-char lowercase suffix for temp table names during recreates.
const genTempTableName = customAlphabet('abcdefghijklmnopqrstuvwxyz', 10);
+
/**
 * Diff two schema snapshots and produce the SQL statements that migrate the
 * database from `oldSnapshot` to `newSnapshot`.
 *
 * @param oldSnapshot - Snapshot of the currently deployed schema.
 * @param newSnapshot - Snapshot built from the local config.
 * @param reset - Drop every existing table first and recreate from scratch
 * (the `--force-reset` flow).
 * @returns Ordered `queries` to run, plus `confirmations`: human-readable
 * warnings about changes that cannot be applied without data loss.
 * @throws When a table (or column) was added and another dropped in the same
 * batch — this looks like a rename, which must be resolved explicitly.
 */
export async function getMigrationQueries({
	oldSnapshot,
	newSnapshot,
	reset = false,
}: {
	oldSnapshot: DBSnapshot;
	newSnapshot: DBSnapshot;
	reset?: boolean;
}): Promise<{ queries: string[]; confirmations: string[] }> {
	const queries: string[] = [];
	const confirmations: string[] = [];

	// When doing a reset, first create DROP TABLE statements, then treat everything
	// else as creation.
	if (reset) {
		const currentSnapshot = oldSnapshot;
		oldSnapshot = createEmptySnapshot();
		queries.push(...getDropTableQueriesForSnapshot(currentSnapshot));
	}

	const addedTables = getAddedTables(oldSnapshot, newSnapshot);
	const droppedTables = getDroppedTables(oldSnapshot, newSnapshot);
	// Tables flagged `deprecated: true` may be dropped alongside additions;
	// only undeprecated drops trigger the rename guard below.
	const notDeprecatedDroppedTables = Object.fromEntries(
		Object.entries(droppedTables).filter(([, table]) => !table.deprecated),
	);
	if (!isEmpty(addedTables) && !isEmpty(notDeprecatedDroppedTables)) {
		const oldTable = Object.keys(notDeprecatedDroppedTables)[0];
		const newTable = Object.keys(addedTables)[0];
		throw new Error(RENAME_TABLE_ERROR(oldTable, newTable));
	}

	for (const [tableName, table] of Object.entries(addedTables)) {
		queries.push(getCreateTableQuery(tableName, table));
		queries.push(...getCreateIndexQueries(tableName, table));
	}

	for (const [tableName] of Object.entries(droppedTables)) {
		const dropQuery = `DROP TABLE ${sqlite.escapeName(tableName)}`;
		queries.push(dropQuery);
	}

	// Tables present in both snapshots: diff their columns/indexes per table.
	for (const [tableName, newTable] of Object.entries(newSnapshot.schema)) {
		const oldTable = oldSnapshot.schema[tableName];
		if (!oldTable) continue;
		const addedColumns = getAdded(oldTable.columns, newTable.columns);
		const droppedColumns = getDropped(oldTable.columns, newTable.columns);
		const notDeprecatedDroppedColumns = Object.fromEntries(
			Object.entries(droppedColumns).filter(([, col]) => !col.schema.deprecated),
		);
		// Same rename guard as for tables, at column granularity.
		if (!isEmpty(addedColumns) && !isEmpty(notDeprecatedDroppedColumns)) {
			throw new Error(
				RENAME_COLUMN_ERROR(
					`${tableName}.${Object.keys(addedColumns)[0]}`,
					`${tableName}.${Object.keys(notDeprecatedDroppedColumns)[0]}`,
				),
			);
		}
		const result = await getTableChangeQueries({
			tableName,
			oldTable,
			newTable,
		});
		queries.push(...result.queries);
		confirmations.push(...result.confirmations);
	}
	return { queries, confirmations };
}
+
/**
 * Compute queries to evolve a single table from `oldTable` to `newTable`,
 * using the cheapest safe strategy in order: index-only changes → in-place
 * ALTER TABLE → full table recreate (flagging unavoidable data loss in
 * `confirmations`).
 */
export async function getTableChangeQueries({
	tableName,
	oldTable,
	newTable,
}: {
	tableName: string;
	oldTable: ResolvedDBTable;
	newTable: ResolvedDBTable;
}): Promise<{ queries: string[]; confirmations: string[] }> {
	const queries: string[] = [];
	const confirmations: string[] = [];
	const updated = getUpdatedColumns(oldTable.columns, newTable.columns);
	const added = getAdded(oldTable.columns, newTable.columns);
	const dropped = getDropped(oldTable.columns, newTable.columns);
	/** Any foreign key changes require a full table recreate */
	const hasForeignKeyChanges = Boolean(deepDiff(oldTable.foreignKeys, newTable.foreignKeys));

	// No column changes at all: only indexes may differ.
	if (!hasForeignKeyChanges && isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
		return {
			queries: getChangeIndexQueries({
				tableName,
				oldIndexes: oldTable.indexes,
				newIndexes: newTable.indexes,
			}),
			confirmations,
		};
	}

	// Every change can be expressed as ALTER TABLE ADD/DROP COLUMN — O(1) path.
	if (
		!hasForeignKeyChanges &&
		isEmpty(updated) &&
		Object.values(dropped).every(canAlterTableDropColumn) &&
		Object.values(added).every(canAlterTableAddColumn)
	) {
		queries.push(
			...getAlterTableQueries(tableName, added, dropped),
			...getChangeIndexQueries({
				tableName,
				oldIndexes: oldTable.indexes,
				newIndexes: newTable.indexes,
			}),
		);
		return { queries, confirmations };
	}

	// Fall back to recreating the table. Surface a confirmation message when
	// the recreate cannot preserve existing rows.
	const dataLossCheck = canRecreateTableWithoutDataLoss(added, updated);
	if (dataLossCheck.dataLoss) {
		const { reason, columnName } = dataLossCheck;
		const reasonMsgs: Record<DataLossReason, string> = {
			'added-required': `You added new required column '${color.bold(
				tableName + '.' + columnName,
			)}' with no default value.\n  This cannot be executed on an existing table.`,
			'updated-type': `Updating existing column ${color.bold(
				tableName + '.' + columnName,
			)} to a new type that cannot be handled automatically.`,
		};
		confirmations.push(reasonMsgs[reason]);
	}

	const primaryKeyExists = Object.entries(newTable.columns).find(([, column]) =>
		hasPrimaryKey(column),
	);
	const droppedPrimaryKey = Object.entries(dropped).find(([, column]) => hasPrimaryKey(column));

	const recreateTableQueries = getRecreateTableQueries({
		tableName,
		newTable,
		added,
		hasDataLoss: dataLossCheck.dataLoss,
		// Carry the implicit `_id` column only when no explicit primary key is
		// involved on either side of the change.
		migrateHiddenPrimaryKey: !primaryKeyExists && !droppedPrimaryKey,
	});
	queries.push(...recreateTableQueries, ...getCreateIndexQueries(tableName, newTable));
	return { queries, confirmations };
}
+
+function getChangeIndexQueries({
+ tableName,
+ oldIndexes = {},
+ newIndexes = {},
+}: {
+ tableName: string;
+ oldIndexes?: ResolvedIndexes;
+ newIndexes?: ResolvedIndexes;
+}) {
+ const added = getAdded(oldIndexes, newIndexes);
+ const dropped = getDropped(oldIndexes, newIndexes);
+ const updated = getUpdated(oldIndexes, newIndexes);
+
+ Object.assign(dropped, updated);
+ Object.assign(added, updated);
+
+ const queries: string[] = [];
+ for (const indexName of Object.keys(dropped)) {
+ const dropQuery = `DROP INDEX ${sqlite.escapeName(indexName)}`;
+ queries.push(dropQuery);
+ }
+ queries.push(...getCreateIndexQueries(tableName, { indexes: added }));
+ return queries;
+}
+
+function getAddedTables(oldTables: DBSnapshot, newTables: DBSnapshot): ResolvedDBTables {
+ const added: ResolvedDBTables = {};
+ for (const [key, newTable] of Object.entries(newTables.schema)) {
+ if (!(key in oldTables.schema)) added[key] = newTable;
+ }
+ return added;
+}
+
+function getDroppedTables(oldTables: DBSnapshot, newTables: DBSnapshot): ResolvedDBTables {
+ const dropped: ResolvedDBTables = {};
+ for (const [key, oldTable] of Object.entries(oldTables.schema)) {
+ if (!(key in newTables.schema)) dropped[key] = oldTable;
+ }
+ return dropped;
+}
+
+/**
+ * Get ALTER TABLE queries to update the table schema. Assumes all added and dropped columns pass
+ * `canUseAlterTableAddColumn` and `canAlterTableDropColumn` checks!
+ */
+function getAlterTableQueries(
+ unescTableName: string,
+ added: DBColumns,
+ dropped: DBColumns,
+): string[] {
+ const queries: string[] = [];
+ const tableName = sqlite.escapeName(unescTableName);
+
+ for (const [unescColumnName, column] of Object.entries(added)) {
+ const columnName = sqlite.escapeName(unescColumnName);
+ const type = schemaTypeToSqlType(column.type);
+ const q = `ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${type}${getModifiers(
+ columnName,
+ column,
+ )}`;
+ queries.push(q);
+ }
+
+ for (const unescColumnName of Object.keys(dropped)) {
+ const columnName = sqlite.escapeName(unescColumnName);
+ const q = `ALTER TABLE ${tableName} DROP COLUMN ${columnName}`;
+ queries.push(q);
+ }
+
+ return queries;
+}
+
/**
 * Recreate a table: create a temp table with the new schema, copy surviving
 * columns across, drop the old table, and rename the temp into place.
 * When data loss is unavoidable, just drop and recreate empty.
 *
 * @param migrateHiddenPrimaryKey - Also copy the implicit `_id` column when
 * neither the old nor new schema declares an explicit primary key.
 */
function getRecreateTableQueries({
	tableName: unescTableName,
	newTable,
	added,
	hasDataLoss,
	migrateHiddenPrimaryKey,
}: {
	tableName: string;
	newTable: ResolvedDBTable;
	added: Record<string, DBColumn>;
	hasDataLoss: boolean;
	migrateHiddenPrimaryKey: boolean;
}): string[] {
	// Random suffix avoids colliding with any existing table name.
	const unescTempName = `${unescTableName}_${genTempTableName()}`;
	const tempName = sqlite.escapeName(unescTempName);
	const tableName = sqlite.escapeName(unescTableName);

	if (hasDataLoss) {
		return [`DROP TABLE ${tableName}`, getCreateTableQuery(unescTableName, newTable)];
	}
	const newColumns = [...Object.keys(newTable.columns)];
	if (migrateHiddenPrimaryKey) {
		newColumns.unshift('_id');
	}
	// Only copy columns that already exist in the old table; newly added
	// columns are populated by their defaults from the CREATE TABLE definition.
	const escapedColumns = newColumns
		.filter((i) => !(i in added))
		.map((c) => sqlite.escapeName(c))
		.join(', ');

	return [
		getCreateTableQuery(unescTempName, newTable),
		`INSERT INTO ${tempName} (${escapedColumns}) SELECT ${escapedColumns} FROM ${tableName}`,
		`DROP TABLE ${tableName}`,
		`ALTER TABLE ${tempName} RENAME TO ${tableName}`,
	];
}
+
+function isEmpty(obj: Record<string, unknown>) {
+ return Object.keys(obj).length === 0;
+}
+
+/**
+ * ADD COLUMN is preferred for O(1) table updates, but is only supported for _some_ column
+ * definitions.
+ *
+ * @see https://www.sqlite.org/lang_altertable.html#alter_table_add_column
+ */
+function canAlterTableAddColumn(column: DBColumn) {
+ if (column.schema.unique) return false;
+ if (hasRuntimeDefault(column)) return false;
+ if (!column.schema.optional && !hasDefault(column)) return false;
+ if (hasPrimaryKey(column)) return false;
+ if (getReferencesConfig(column)) return false;
+ return true;
+}
+
+function canAlterTableDropColumn(column: DBColumn) {
+ if (column.schema.unique) return false;
+ if (hasPrimaryKey(column)) return false;
+ return true;
+}
+
type DataLossReason = 'added-required' | 'updated-type';
type DataLossResponse =
	| { dataLoss: false }
	| { dataLoss: true; columnName: string; reason: DataLossReason };

/**
 * Decide whether the recreate-and-copy strategy can preserve existing rows.
 * Data is lost when a new required column has no value to backfill, or when a
 * type change has no automatic representation-compatible conversion.
 */
function canRecreateTableWithoutDataLoss(
	added: DBColumns,
	updated: UpdatedColumns,
): DataLossResponse {
	for (const [columnName, a] of Object.entries(added)) {
		// New non-number primary key without a default cannot be backfilled.
		// (number PKs are exempt — presumably filled by SQLite's rowid; TODO confirm)
		if (hasPrimaryKey(a) && a.type !== 'number' && !hasDefault(a)) {
			return { dataLoss: true, columnName, reason: 'added-required' };
		}
		// New required column with no default has nothing to backfill.
		if (!a.schema.optional && !hasDefault(a)) {
			return { dataLoss: true, columnName, reason: 'added-required' };
		}
	}
	for (const [columnName, u] of Object.entries(updated)) {
		if (u.old.type !== u.new.type && !canChangeTypeWithoutQuery(u.old, u.new)) {
			return { dataLoss: true, columnName, reason: 'updated-type' };
		}
	}
	return { dataLoss: false };
}
+
+function getAdded<T>(oldObj: Record<string, T>, newObj: Record<string, T>) {
+ const added: Record<string, T> = {};
+ for (const [key, value] of Object.entries(newObj)) {
+ if (!(key in oldObj)) added[key] = value;
+ }
+ return added;
+}
+
+function getDropped<T>(oldObj: Record<string, T>, newObj: Record<string, T>) {
+ const dropped: Record<string, T> = {};
+ for (const [key, value] of Object.entries(oldObj)) {
+ if (!(key in newObj)) dropped[key] = value;
+ }
+ return dropped;
+}
+
+function getUpdated<T>(oldObj: Record<string, T>, newObj: Record<string, T>) {
+ const updated: Record<string, T> = {};
+ for (const [key, value] of Object.entries(newObj)) {
+ const oldValue = oldObj[key];
+ if (!oldValue) continue;
+ if (deepDiff(oldValue, value)) updated[key] = value;
+ }
+ return updated;
+}
+
type UpdatedColumns = Record<string, { old: DBColumn; new: DBColumn }>;

/**
 * Columns present in both tables whose definition changed. Type changes that
 * are representation-compatible (see `canChangeTypeWithoutQuery`) are
 * re-parsed under the new type first, so a bare type swap does not register
 * as a difference by itself.
 */
function getUpdatedColumns(oldColumns: DBColumns, newColumns: DBColumns): UpdatedColumns {
	const updated: UpdatedColumns = {};
	for (const [key, newColumn] of Object.entries(newColumns)) {
		let oldColumn = oldColumns[key];
		if (!oldColumn) continue;

		if (oldColumn.type !== newColumn.type && canChangeTypeWithoutQuery(oldColumn, newColumn)) {
			// If we can safely change the type without a query,
			// try parsing the old schema as the new schema.
			// This lets us diff the columns as if they were the same type.
			const asNewColumn = columnSchema.safeParse({
				type: newColumn.type,
				schema: oldColumn.schema,
			});
			if (asNewColumn.success) {
				oldColumn = asNewColumn.data;
			}
			// If parsing fails, move on to the standard diff.
		}

		const diff = deepDiff(oldColumn, newColumn);

		if (diff) {
			updated[key] = { old: oldColumn, new: newColumn };
		}
	}
	return updated;
}
+const typeChangesWithoutQuery: Array<{ from: ColumnType; to: ColumnType }> = [
+ { from: 'boolean', to: 'number' },
+ { from: 'date', to: 'text' },
+ { from: 'json', to: 'text' },
+];
+
+function canChangeTypeWithoutQuery(oldColumn: DBColumn, newColumn: DBColumn) {
+ return typeChangesWithoutQuery.some(
+ ({ from, to }) => oldColumn.type === from && newColumn.type === to,
+ );
+}
+
+// Using `DBColumn` will not narrow `default` based on the column `type`
+// Handle each column separately
+type WithDefaultDefined<T extends DBColumn> = T & Required<Pick<T['schema'], 'default'>>;
+type DBColumnWithDefault =
+ | WithDefaultDefined<TextColumn>
+ | WithDefaultDefined<DateColumn>
+ | WithDefaultDefined<NumberColumn>
+ | WithDefaultDefined<BooleanColumn>
+ | WithDefaultDefined<JsonColumn>;
+
+function hasRuntimeDefault(column: DBColumn): column is DBColumnWithDefault {
+ return !!(column.schema.default && isSerializedSQL(column.schema.default));
+}
+
+export function getProductionCurrentSnapshot(options: {
+ dbInfo: RemoteDatabaseInfo;
+ appToken: string;
+}): Promise<DBSnapshot | undefined> {
+ return options.dbInfo.type === 'studio'
+ ? getStudioCurrentSnapshot(options.appToken, options.dbInfo.url)
+ : getDbCurrentSnapshot(options.appToken, options.dbInfo.url);
+}
+
/**
 * Read the latest schema snapshot recorded in a libSQL database's
 * `_astro_db_snapshot` table.
 *
 * @returns The parsed snapshot, or `undefined` when the snapshot table does
 * not exist yet (i.e. the schema was never pushed to this database).
 */
async function getDbCurrentSnapshot(
	appToken: string,
	remoteUrl: string,
): Promise<DBSnapshot | undefined> {
	const client = createRemoteDatabaseClient({
		dbType: 'libsql',
		appToken,
		remoteUrl,
	});

	try {
		const res = await client.get<{ snapshot: string }>(
			// Latest snapshot
			sql`select snapshot from _astro_db_snapshot order by id desc limit 1;`,
		);

		return JSON.parse(res.snapshot);
	} catch (error) {
		// Don't handle errors that are not from libSQL
		if (
			isDbError(error) &&
			// If the schema was never pushed to the database yet the table won't exist.
			// Treat a missing snapshot table as an empty table.

			// When connecting to a remote database in that condition
			// the query will fail with the following error code and message.
			((error.code === 'SQLITE_UNKNOWN' &&
				error.message === 'SQLITE_UNKNOWN: SQLite error: no such table: _astro_db_snapshot') ||
				// When connecting to a local or in-memory database that does not have a snapshot table yet
				// the query will fail with the following error code and message.
				(error.code === 'SQLITE_ERROR' &&
					error.message === 'SQLITE_ERROR: no such table: _astro_db_snapshot'))
		) {
			return;
		}

		throw error;
	}
}
+
+async function getStudioCurrentSnapshot(
+ appToken: string,
+ remoteUrl: string,
+): Promise<DBSnapshot | undefined> {
+ const url = new URL('/db/schema', remoteUrl);
+
+ const response = await safeFetch(
+ url,
+ {
+ method: 'POST',
+ headers: new Headers({
+ Authorization: `Bearer ${appToken}`,
+ }),
+ },
+ async (res) => {
+ console.error(`${url.toString()} failed: ${res.status} ${res.statusText}`);
+ console.error(await res.text());
+ throw new Error(`/db/schema fetch failed: ${res.status} ${res.statusText}`);
+ },
+ );
+
+ const result = (await response.json()) as Result<DBSnapshot>;
+ if (!result.success) {
+ console.error(`${url.toString()} unsuccessful`);
+ console.error(await response.text());
+ throw new Error(`/db/schema fetch unsuccessful`);
+ }
+ return result.data;
+}
+
+function getDropTableQueriesForSnapshot(snapshot: DBSnapshot) {
+ const queries = [];
+ for (const tableName of Object.keys(snapshot.schema)) {
+ const dropQuery = getDropTableIfExistsQuery(tableName);
+ queries.unshift(dropQuery);
+ }
+ return queries;
+}
+
+export function createCurrentSnapshot({ tables = {} }: DBConfig): DBSnapshot {
+ const schema = JSON.parse(JSON.stringify(tables));
+ return { version: MIGRATION_VERSION, schema };
+}
+
+export function createEmptySnapshot(): DBSnapshot {
+ return { version: MIGRATION_VERSION, schema: {} };
+}
+
+export function formatDataLossMessage(confirmations: string[], isColor = true): string {
+ const messages = [];
+ messages.push(color.red('✖ We found some schema changes that cannot be handled automatically:'));
+ messages.push(``);
+ messages.push(...confirmations.map((m, i) => color.red(` (${i + 1}) `) + m));
+ messages.push(``);
+ messages.push(`To resolve, revert these changes or update your schema, and re-run the command.`);
+ messages.push(
+ `You may also run 'astro db push --force-reset' to ignore all warnings and force-push your local database schema to production instead. All data will be lost and the database will be reset.`,
+ );
+ let finalMessage = messages.join('\n');
+ if (!isColor) {
+ finalMessage = stripVTControlCharacters(finalMessage);
+ }
+ return finalMessage;
+}
diff --git a/packages/db/src/core/cli/print-help.ts b/packages/db/src/core/cli/print-help.ts
new file mode 100644
index 000000000..4082380b2
--- /dev/null
+++ b/packages/db/src/core/cli/print-help.ts
@@ -0,0 +1,69 @@
+import { bgGreen, bgWhite, black, bold, dim, green } from 'kleur/colors';
+
+/**
+ * Uses implementation from Astro core
+ * @see https://github.com/withastro/astro/blob/main/packages/astro/src/core/messages.ts#L303
+ */
+export function printHelp({
+ commandName,
+ headline,
+ usage,
+ tables,
+ description,
+}: {
+ commandName: string;
+ headline?: string;
+ usage?: string;
+ tables?: Record<string, [command: string, help: string][]>;
+ description?: string;
+}) {
+ const linebreak = () => '';
+ const title = (label: string) => ` ${bgWhite(black(` ${label} `))}`;
+ const table = (rows: [string, string][], { padding }: { padding: number }) => {
+ const split = process.stdout.columns < 60;
+ let raw = '';
+
+ for (const row of rows) {
+ if (split) {
+ raw += ` ${row[0]}\n `;
+ } else {
+ raw += `${`${row[0]}`.padStart(padding)}`;
+ }
+ raw += ' ' + dim(row[1]) + '\n';
+ }
+
+ return raw.slice(0, -1); // remove latest \n
+ };
+
+ let message = [];
+
+ if (headline) {
+ message.push(
+ linebreak(),
+ ` ${bgGreen(black(` ${commandName} `))} ${green(
+ `v${process.env.PACKAGE_VERSION ?? ''}`,
+ )} ${headline}`,
+ );
+ }
+
+ if (usage) {
+ message.push(linebreak(), ` ${green(commandName)} ${bold(usage)}`);
+ }
+
+ if (tables) {
+ function calculateTablePadding(rows: [string, string][]) {
+ return rows.reduce((val, [first]) => Math.max(val, first.length), 0) + 2;
+ }
+ const tableEntries = Object.entries(tables);
+ const padding = Math.max(...tableEntries.map(([, rows]) => calculateTablePadding(rows)));
+ for (const [tableTitle, tableRows] of tableEntries) {
+ message.push(linebreak(), title(tableTitle), table(tableRows, { padding }));
+ }
+ }
+
+ if (description) {
+ message.push(linebreak(), `${description}`);
+ }
+
+ console.log(message.join('\n') + '\n');
+}
diff --git a/packages/db/src/core/cli/types.ts b/packages/db/src/core/cli/types.ts
new file mode 100644
index 000000000..4294c3fb0
--- /dev/null
+++ b/packages/db/src/core/cli/types.ts
@@ -0,0 +1,5 @@
/** Parsed CLI arguments — matches the output shape of `yargs-parser`. */
export interface Arguments {
	/** Positional arguments. */
	_: Array<string | number>;
	/** Arguments after the `--` separator, when present. */
	'--'?: Array<string | number>;
	/** Named flags (e.g. `--remote`); values are untyped. */
	[argName: string]: any;
}
diff --git a/packages/db/src/core/consts.ts b/packages/db/src/core/consts.ts
new file mode 100644
index 000000000..703a91dca
--- /dev/null
+++ b/packages/db/src/core/consts.ts
@@ -0,0 +1,19 @@
import { readFileSync } from 'node:fs';

// Package name read from package.json at runtime so it never drifts from the manifest.
export const PACKAGE_NAME = JSON.parse(
	readFileSync(new URL('../../package.json', import.meta.url), 'utf8'),
).name;

// Pre-stringified module specifiers, ready to interpolate into generated code.
export const RUNTIME_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime`);

export const RUNTIME_VIRTUAL_IMPORT = JSON.stringify(`${PACKAGE_NAME}/dist/runtime/virtual.js`);

// File name of the generated type declarations for the virtual module.
export const DB_TYPES_FILE = 'db-types.d.ts';

// Specifier users import tables and the client from.
export const VIRTUAL_MODULE_ID = 'astro:db';

// Local database file location, relative to the project root.
export const DB_PATH = '.astro/content.db';

// Supported db config file names, in lookup order.
export const CONFIG_FILE_NAMES = ['config.ts', 'config.js', 'config.mts', 'config.mjs'];

// Snapshot format version; bump when the snapshot shape changes.
export const MIGRATION_VERSION = '2024-03-12';
diff --git a/packages/db/src/core/errors.ts b/packages/db/src/core/errors.ts
new file mode 100644
index 000000000..5272fd3c2
--- /dev/null
+++ b/packages/db/src/core/errors.ts
@@ -0,0 +1,50 @@
+import { bold, cyan, red } from 'kleur/colors';
+
// `astro db execute` was called without a file argument.
export const MISSING_EXECUTE_PATH_ERROR = `${red(
	'▶ No file path provided.',
)} Provide a path by running ${cyan('astro db execute <path>')}\n`;

// A table was added and another removed in the same batch — ambiguous rename.
// (`\u25B6` is the same '▶' marker the other messages use literally.)
export const RENAME_TABLE_ERROR = (oldTable: string, newTable: string) => {
	return (
		red('\u25B6 Potential table rename detected: ' + oldTable + ' -> ' + newTable) +
		`
  You cannot add and remove tables in the same schema update batch.

  1. Use "deprecated: true" to deprecate a table before renaming.
  2. Use "--force-reset" to ignore this warning and reset the database (deleting all of your data).

  Visit https://docs.astro.build/en/guides/astro-db/#renaming-tables to learn more.`
	);
};

// A column was added and another removed on the same table — ambiguous rename.
export const RENAME_COLUMN_ERROR = (oldSelector: string, newSelector: string) => {
	return (
		red('▶ Potential column rename detected: ' + oldSelector + ', ' + newSelector) +
		`\n  You cannot add and remove columns in the same table.` +
		`\n  To resolve, add a 'deprecated: true' flag to '${oldSelector}' instead.`
	);
};

// The file passed to `astro db execute` does not exist.
export const FILE_NOT_FOUND_ERROR = (path: string) => `${red('▶ File not found:')} ${bold(path)}\n`;

// `astro db shell` requires a --query flag.
export const SHELL_QUERY_MISSING_ERROR = `${red(
	'▶ Please provide a query to execute using the --query flag.',
)}\n`;

// Wraps errors thrown while running a user-provided file.
export const EXEC_ERROR = (error: string) => {
	return `${red(`Error while executing file:`)}\n\n${error}`;
};

// The executed file must export a default function.
export const EXEC_DEFAULT_EXPORT_ERROR = (fileName: string) => {
	return EXEC_ERROR(`Missing default function export in ${bold(fileName)}`);
};
+
+export const INTEGRATION_TABLE_CONFLICT_ERROR = (
+ integrationName: string,
+ tableName: string,
+ isUserConflict: boolean,
+) => {
+ return red('▶ Conflicting table name in integration ' + bold(integrationName)) + isUserConflict
+ ? `\n A user-defined table named ${bold(tableName)} already exists`
+ : `\n Another integration already added a table named ${bold(tableName)}`;
+};
diff --git a/packages/db/src/core/integration/error-map.ts b/packages/db/src/core/integration/error-map.ts
new file mode 100644
index 000000000..d2697c9ca
--- /dev/null
+++ b/packages/db/src/core/integration/error-map.ts
@@ -0,0 +1,104 @@
+/**
+ * This is a modified version of Astro's error map. source:
+ * https://github.com/withastro/astro/blob/main/packages/astro/src/content/error-map.ts
+ */
+import type { z } from 'astro/zod';
+
/** Accumulated invalid_type/invalid_literal info for one error path across union members. */
interface TypeOrLiteralErrByPathEntry {
	code: 'invalid_type' | 'invalid_literal';
	received: unknown;
	/** Every expected value/type seen at this path — one entry per union member. */
	expected: unknown[];
}
+
/**
 * Custom zod error map producing readable messages for schema validation.
 * Union errors are condensed: type/literal mismatches shared by EVERY union
 * member at the same path are merged into a single `Expected 'a' | 'b'` line.
 */
export const errorMap: z.ZodErrorMap = (baseError, ctx) => {
	const baseErrorPath = flattenErrorPath(baseError.path);
	if (baseError.code === 'invalid_union') {
		// Optimization: Combine type and literal errors for keys that are common across ALL union types
		// Ex. a union between `{ key: z.literal('tutorial') }` and `{ key: z.literal('blog') }` will
		// raise a single error when `key` does not match:
		// > Did not match union.
		// > key: Expected `'tutorial' | 'blog'`, received 'foo'
		const typeOrLiteralErrByPath = new Map<string, TypeOrLiteralErrByPathEntry>();
		for (const unionError of baseError.unionErrors.flatMap((e) => e.errors)) {
			if (unionError.code === 'invalid_type' || unionError.code === 'invalid_literal') {
				const flattenedErrorPath = flattenErrorPath(unionError.path);
				const typeOrLiteralErr = typeOrLiteralErrByPath.get(flattenedErrorPath);
				if (typeOrLiteralErr) {
					typeOrLiteralErr.expected.push(unionError.expected);
				} else {
					typeOrLiteralErrByPath.set(flattenedErrorPath, {
						code: unionError.code,
						received: (unionError as any).received,
						expected: [unionError.expected],
					});
				}
			}
		}
		const messages: string[] = [
			prefix(
				baseErrorPath,
				typeOrLiteralErrByPath.size ? 'Did not match union:' : 'Did not match union.',
			),
		];
		return {
			message: messages
				.concat(
					[...typeOrLiteralErrByPath.entries()]
						// If type or literal error isn't common to ALL union types,
						// filter it out. Can lead to confusing noise.
						.filter(([, error]) => error.expected.length === baseError.unionErrors.length)
						.map(([key, error]) =>
							// Avoid printing the key again if it's a base error
							key === baseErrorPath
								? `> ${getTypeOrLiteralMsg(error)}`
								: `> ${prefix(key, getTypeOrLiteralMsg(error))}`,
						),
				)
				.join('\n'),
		};
	}
	// Non-union type/literal mismatch: render directly.
	if (baseError.code === 'invalid_literal' || baseError.code === 'invalid_type') {
		return {
			message: prefix(
				baseErrorPath,
				getTypeOrLiteralMsg({
					code: baseError.code,
					received: (baseError as any).received,
					expected: [baseError.expected],
				}),
			),
		};
	} else if (baseError.message) {
		return { message: prefix(baseErrorPath, baseError.message) };
	} else {
		// Fall back to zod's default message for anything else.
		return { message: prefix(baseErrorPath, ctx.defaultError) };
	}
};
+
+const getTypeOrLiteralMsg = (error: TypeOrLiteralErrByPathEntry): string => {
+ if (error.received === 'undefined') return 'Required';
+ const expectedDeduped = new Set(error.expected);
+ switch (error.code) {
+ case 'invalid_type':
+ return `Expected type \`${unionExpectedVals(expectedDeduped)}\`, received ${JSON.stringify(
+ error.received,
+ )}`;
+ case 'invalid_literal':
+ return `Expected \`${unionExpectedVals(expectedDeduped)}\`, received ${JSON.stringify(
+ error.received,
+ )}`;
+ }
+};
+
+const prefix = (key: string, msg: string) => (key.length ? `**${key}**: ${msg}` : msg);
+
+const unionExpectedVals = (expectedVals: Set<unknown>) =>
+ [...expectedVals]
+ .map((expectedVal, idx) => {
+ if (idx === 0) return JSON.stringify(expectedVal);
+ const sep = ' | ';
+ return `${sep}${JSON.stringify(expectedVal)}`;
+ })
+ .join('');
+
+const flattenErrorPath = (errorPath: Array<string | number>) => errorPath.join('.');
diff --git a/packages/db/src/core/integration/file-url.ts b/packages/db/src/core/integration/file-url.ts
new file mode 100644
index 000000000..76ce70cb9
--- /dev/null
+++ b/packages/db/src/core/integration/file-url.ts
@@ -0,0 +1,95 @@
+import fs from 'node:fs';
+import path from 'node:path';
+import { pathToFileURL } from 'node:url';
+import type { AstroConfig, AstroIntegration } from 'astro';
+import type { VitePlugin } from '../utils.js';
+
+async function copyFile(toDir: URL, fromUrl: URL, toUrl: URL) {
+ await fs.promises.mkdir(toDir, { recursive: true });
+ await fs.promises.rename(fromUrl, toUrl);
+}
+
/**
 * Astro integration backing `?fileurl` imports: at build time the referenced
 * file is emitted as a bundler asset and the import resolves to its URL; in
 * dev it resolves to a plain `file://` URL on disk.
 */
export function fileURLIntegration(): AstroIntegration {
	// File names of every emitted asset, collected across bundle passes so
	// `astro:build:done` can clean them up or relocate them.
	const fileNames: string[] = [];

	function createVitePlugin(command: 'build' | 'preview' | 'dev' | 'sync'): VitePlugin {
		let referenceIds: string[] = [];
		return {
			name: '@astrojs/db/file-url',
			enforce: 'pre',
			async load(id) {
				if (id.endsWith('?fileurl')) {
					// Strip the `?fileurl` query to get the real file path.
					const filePath = id.slice(0, id.indexOf('?'));
					if (command === 'build') {
						const data = await fs.promises.readFile(filePath);
						const name = path.basename(filePath);
						const referenceId = this.emitFile({
							name,
							source: data,
							type: 'asset',
						});
						referenceIds.push(referenceId);
						// The bundler replaces this placeholder with the final asset URL.
						return `export default import.meta.ROLLUP_FILE_URL_${referenceId};`;
					}
					// dev mode
					else {
						return `export default new URL(${JSON.stringify(pathToFileURL(filePath).toString())})`;
					}
				}
			},
			generateBundle() {
				// Save file names so we can copy them back over.
				for (const referenceId of referenceIds) {
					fileNames.push(this.getFileName(referenceId));
				}
				// Reset `referenceIds` for later generateBundle() runs.
				// Prevents lookup for ids that have already been copied.
				referenceIds = [];
			},
		};
	}

	let config: AstroConfig;
	return {
		name: '@astrojs/db/file-url',
		hooks: {
			'astro:config:setup'({ updateConfig, command }) {
				updateConfig({
					vite: {
						plugins: [createVitePlugin(command)],
					},
				});
			},
			'astro:config:done': ({ config: _config }) => {
				// Capture the final config for use in the build:done hook below.
				config = _config;
			},
			async 'astro:build:done'() {
				if (config.output === 'static') {
					// Delete the files since they are only used for the build process.
					const unlinks: Promise<unknown>[] = [];
					for (const fileName of fileNames) {
						const url = new URL(fileName, config.outDir);
						unlinks.push(fs.promises.unlink(url));
					}
					await Promise.all(unlinks);
					// Delete the assets directory if it is empty.
					// NOTE(fks): Ignore errors here because this is expected to fail
					// if the directory contains files, or if it does not exist.
					// If it errors for some unknown reason, it's not a big deal.
					const assetDir = new URL(config.build.assets, config.outDir);
					await fs.promises.rmdir(assetDir).catch(() => []);
				} else {
					// Move files back over to the dist output path
					const moves: Promise<unknown>[] = [];
					for (const fileName of fileNames) {
						const fromUrl = new URL(fileName, config.build.client);
						const toUrl = new URL(fileName, config.build.server);
						const toDir = new URL('./', toUrl);
						moves.push(copyFile(toDir, fromUrl, toUrl));
					}
					await Promise.all(moves);
				}
			},
		},
	};
}
diff --git a/packages/db/src/core/integration/index.ts b/packages/db/src/core/integration/index.ts
new file mode 100644
index 000000000..36dc6b0f8
--- /dev/null
+++ b/packages/db/src/core/integration/index.ts
@@ -0,0 +1,236 @@
+import { existsSync } from 'node:fs';
+import { mkdir, writeFile } from 'node:fs/promises';
+import { dirname } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import type { ManagedAppToken } from '@astrojs/studio';
+import type { AstroIntegration } from 'astro';
+import { blue, yellow } from 'kleur/colors';
+import {
+ type HMRPayload,
+ type UserConfig,
+ type ViteDevServer,
+ createServer,
+ loadEnv,
+ mergeConfig,
+} from 'vite';
+import parseArgs from 'yargs-parser';
+import { AstroDbError, isDbError } from '../../runtime/utils.js';
+import { CONFIG_FILE_NAMES, DB_PATH, VIRTUAL_MODULE_ID } from '../consts.js';
+import { EXEC_DEFAULT_EXPORT_ERROR, EXEC_ERROR } from '../errors.js';
+import { resolveDbConfig } from '../load-file.js';
+import { SEED_DEV_FILE_NAME } from '../queries.js';
+import { type VitePlugin, getDbDirectoryUrl, getManagedRemoteToken } from '../utils.js';
+import { fileURLIntegration } from './file-url.js';
+import { getDtsContent } from './typegen.js';
+import {
+ type LateSeedFiles,
+ type LateTables,
+ type SeedHandler,
+ vitePluginDb,
+} from './vite-plugin-db.js';
+
/**
 * The core `astro:db` Astro integration.
 *
 * Responsibilities across the build lifecycle:
 * - `astro:config:setup`: register the Vite plugin that serves the `astro:db`
 *   virtual module (local or remote flavor depending on `--remote`).
 * - `astro:config:done`: resolve user/integration db config, create the local
 *   database file when needed, inject generated `db.d.ts` types.
 * - `astro:server:setup`: wire seed-file execution to the dev server, restart
 *   the server when a db config file changes.
 * - `astro:build:*`: guard against accidental local-db server builds and spin
 *   up a throwaway Vite server so seed files can run during the build.
 */
function astroDBIntegration(): AstroIntegration {
  let connectToRemote = false;
  let configFileDependencies: string[] = [];
  let root: URL;
  let appToken: ManagedAppToken | undefined;
  // Used during production builds to load seed files.
  let tempViteServer: ViteDevServer | undefined;

  // Make table loading "late" to pass to plugins from `config:setup`,
  // but load during `config:done` to wait for integrations to settle.
  let tables: LateTables = {
    get() {
      throw new Error('[astro:db] INTERNAL Tables not loaded yet');
    },
  };
  let seedFiles: LateSeedFiles = {
    get() {
      throw new Error('[astro:db] INTERNAL Seed files not loaded yet');
    },
  };
  let seedHandler: SeedHandler = {
    execute: () => {
      throw new Error('[astro:db] INTERNAL Seed handler not loaded yet');
    },
    inProgress: false,
  };

  let command: 'dev' | 'build' | 'preview' | 'sync';
  let finalBuildOutput: string;
  return {
    name: 'astro:db',
    hooks: {
      'astro:config:setup': async ({ updateConfig, config, command: _command, logger }) => {
        command = _command;
        root = config.root;

        if (command === 'preview') return;

        let dbPlugin: VitePlugin | undefined = undefined;
        const args = parseArgs(process.argv.slice(3));
        // NOTE(review): `connectToRemote` is declared as a boolean but this
        // assigns `string | undefined` from the env var / CLI flag. Truthiness
        // checks below still behave correctly, but consider `Boolean(...)`.
        connectToRemote = process.env.ASTRO_INTERNAL_TEST_REMOTE || args['remote'];

        if (connectToRemote) {
          appToken = await getManagedRemoteToken();
          dbPlugin = vitePluginDb({
            connectToStudio: connectToRemote,
            appToken: appToken.token,
            tables,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            seedHandler,
          });
        } else {
          dbPlugin = vitePluginDb({
            connectToStudio: false,
            tables,
            seedFiles,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            logger,
            seedHandler,
          });
        }

        updateConfig({
          vite: {
            assetsInclude: [DB_PATH],
            plugins: [dbPlugin],
          },
        });
      },
      'astro:config:done': async ({ config, injectTypes, buildOutput }) => {
        if (command === 'preview') return;

        finalBuildOutput = buildOutput;

        // TODO: refine where we load tables
        // @matthewp: may want to load tables by path at runtime
        const { dbConfig, dependencies, integrationSeedPaths } = await resolveDbConfig(config);
        tables.get = () => dbConfig.tables;
        seedFiles.get = () => integrationSeedPaths;
        configFileDependencies = dependencies;

        // Ensure an (empty) local database file exists for local dev/builds.
        const localDbUrl = new URL(DB_PATH, config.root);
        if (!connectToRemote && !existsSync(localDbUrl)) {
          await mkdir(dirname(fileURLToPath(localDbUrl)), { recursive: true });
          await writeFile(localDbUrl, '');
        }

        injectTypes({
          filename: 'db.d.ts',
          content: getDtsContent(tables.get() ?? {}),
        });
      },
      'astro:server:setup': async ({ server, logger }) => {
        // In dev, seed files execute through the dev server's module runner.
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, viteServer: server });
        };
        // Restart the dev server whenever a db config file (or any module it
        // imports) changes on disk.
        const filesToWatch = [
          ...CONFIG_FILE_NAMES.map((c) => new URL(c, getDbDirectoryUrl(root))),
          ...configFileDependencies.map((c) => new URL(c, root)),
        ];
        server.watcher.on('all', (_event, relativeEntry) => {
          const entry = new URL(relativeEntry, root);
          if (filesToWatch.some((f) => entry.href === f.href)) {
            server.restart();
          }
        });
        // Wait for dev server log before showing "connected".
        setTimeout(() => {
          logger.info(
            connectToRemote ? 'Connected to remote database.' : 'New local database created.',
          );
          if (connectToRemote) return;

          const localSeedPaths = SEED_DEV_FILE_NAME.map(
            (name) => new URL(name, getDbDirectoryUrl(root)),
          );
          // Eager load astro:db module on startup
          if (seedFiles.get().length || localSeedPaths.find((path) => existsSync(path))) {
            server.ssrLoadModule(VIRTUAL_MODULE_ID).catch((e) => {
              logger.error(e instanceof Error ? e.message : String(e));
            });
          }
        }, 100);
      },
      'astro:build:start': async ({ logger }) => {
        // A server-output build against the local database file is almost
        // always a mistake: the file won't exist at runtime unless
        // ASTRO_DATABASE_FILE points somewhere persistent.
        if (!connectToRemote && !databaseFileEnvDefined() && finalBuildOutput === 'server') {
          const message = `Attempting to build without the --remote flag or the ASTRO_DATABASE_FILE environment variable defined. You probably want to pass --remote to astro build.`;
          const hint =
            'Learn more connecting to Studio: https://docs.astro.build/en/guides/astro-db/#connect-to-astro-studio';
          throw new AstroDbError(message, hint);
        }

        logger.info('database: ' + (connectToRemote ? yellow('remote') : blue('local database.')));
      },
      'astro:build:setup': async ({ vite }) => {
        // During builds there is no dev server, so seed files run through a
        // temporary middleware-mode Vite server instead.
        tempViteServer = await getTempViteServer({ viteConfig: vite });
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, viteServer: tempViteServer! });
        };
      },
      'astro:build:done': async ({}) => {
        // Release the managed token and the temp server, if they were created.
        await appToken?.destroy();
        await tempViteServer?.close();
      },
    },
  };
}
+
+function databaseFileEnvDefined() {
+ const env = loadEnv('', process.cwd());
+ return env.ASTRO_DATABASE_FILE != null || process.env.ASTRO_DATABASE_FILE != null;
+}
+
+export function integration(): AstroIntegration[] {
+ return [astroDBIntegration(), fileURLIntegration()];
+}
+
/**
 * Execute a seed file through the given Vite server so it can import
 * `astro:db`.
 *
 * @throws AstroDbError when the file does not export a default function, or
 *   when a database error surfaces while the seed function runs.
 */
async function executeSeedFile({
  fileUrl,
  viteServer,
}: {
  fileUrl: URL;
  viteServer: ViteDevServer;
}) {
  // NOTE(review): passes `fileUrl.pathname` rather than fileURLToPath() —
  // confirm this resolves correctly on Windows drive-letter paths.
  const mod = await viteServer.ssrLoadModule(fileUrl.pathname);
  if (typeof mod.default !== 'function') {
    throw new AstroDbError(EXEC_DEFAULT_EXPORT_ERROR(fileURLToPath(fileUrl)));
  }
  try {
    await mod.default();
  } catch (e) {
    // Re-wrap database errors with seed-execution context;
    // rethrow everything else untouched.
    if (isDbError(e)) {
      throw new AstroDbError(EXEC_ERROR(e.message));
    }
    throw e;
  }
}
+
/**
 * Create a minimal, throwaway Vite server used to execute seed files during
 * production builds: middleware mode, no HMR, no file watching, silent logs.
 *
 * Inspired by Astro content collection config loader.
 */
async function getTempViteServer({ viteConfig }: { viteConfig: UserConfig }) {
  const tempViteServer = await createServer(
    mergeConfig(viteConfig, {
      server: { middlewareMode: true, hmr: false, watch: null, ws: false },
      optimizeDeps: { noDiscovery: true },
      ssr: { external: [] },
      logLevel: 'silent',
    }),
  );

  // There is no client attached to this server, so surface module errors as
  // thrown exceptions instead of HMR error payloads.
  const hotSend = tempViteServer.hot.send;
  tempViteServer.hot.send = (payload: HMRPayload) => {
    if (payload.type === 'error') {
      throw payload.err;
    }
    return hotSend(payload);
  };

  return tempViteServer;
}
diff --git a/packages/db/src/core/integration/typegen.ts b/packages/db/src/core/integration/typegen.ts
new file mode 100644
index 000000000..91364b3c3
--- /dev/null
+++ b/packages/db/src/core/integration/typegen.ts
@@ -0,0 +1,26 @@
+import { RUNTIME_IMPORT } from '../consts.js';
+import type { DBTable, DBTables } from '../types.js';
+
+export function getDtsContent(tables: DBTables) {
+ const content = `// This file is generated by Astro DB
+declare module 'astro:db' {
+${Object.entries(tables)
+ .map(([name, table]) => generateTableType(name, table))
+ .join('\n')}
+}
+`;
+ return content;
+}
+
+function generateTableType(name: string, table: DBTable): string {
+ const sanitizedColumnsList = Object.entries(table.columns)
+ // Filter out deprecated columns from the typegen, so that they don't
+ // appear as queryable fields in the generated types / your codebase.
+ .filter(([, val]) => !val.schema.deprecated);
+ const sanitizedColumns = Object.fromEntries(sanitizedColumnsList);
+ let tableType = ` export const ${name}: import(${RUNTIME_IMPORT}).Table<
+ ${JSON.stringify(name)},
+ ${JSON.stringify(sanitizedColumns)}
+ >;`;
+ return tableType;
+}
diff --git a/packages/db/src/core/integration/vite-plugin-db.ts b/packages/db/src/core/integration/vite-plugin-db.ts
new file mode 100644
index 000000000..80919144d
--- /dev/null
+++ b/packages/db/src/core/integration/vite-plugin-db.ts
@@ -0,0 +1,238 @@
+import { existsSync } from 'node:fs';
+import { fileURLToPath } from 'node:url';
+import type { AstroConfig, AstroIntegrationLogger } from 'astro';
+import { type SQL, sql } from 'drizzle-orm';
+import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
+import { createLocalDatabaseClient } from '../../runtime/db-client.js';
+import { normalizeDatabaseUrl } from '../../runtime/index.js';
+import { DB_PATH, RUNTIME_IMPORT, RUNTIME_VIRTUAL_IMPORT, VIRTUAL_MODULE_ID } from '../consts.js';
+import { getResolvedFileUrl } from '../load-file.js';
+import { SEED_DEV_FILE_NAME, getCreateIndexQueries, getCreateTableQuery } from '../queries.js';
+import type { DBTables } from '../types.js';
+import {
+ type VitePlugin,
+ getAstroEnv,
+ getDbDirectoryUrl,
+ getRemoteDatabaseInfo,
+} from '../utils.js';
+
// Resolved ids for the `astro:db` virtual module. The `:seed` variant is
// served while a seed file is executing so that the seed file's own
// `astro:db` import does not retrigger seeding (see vitePluginDb).
export const resolved = {
  module: '\0' + VIRTUAL_MODULE_ID,
  importedFromSeedFile: '\0' + VIRTUAL_MODULE_ID + ':seed',
};

// "Late" accessors: the getters are swapped in during `astro:config:done`,
// after all integrations have contributed their tables / seed files.
export type LateTables = {
  get: () => DBTables;
};
export type LateSeedFiles = {
  get: () => Array<string | URL>;
};
// Bridge for executing a seed file; wired to the dev server in dev and to a
// temporary Vite server during builds.
export type SeedHandler = {
  inProgress: boolean;
  execute: (fileUrl: URL) => Promise<void>;
};

// Discriminated on `connectToStudio`: remote mode requires an app token;
// local mode takes seed files and an optional logger instead.
type VitePluginDBParams =
  | {
      connectToStudio: false;
      tables: LateTables;
      seedFiles: LateSeedFiles;
      srcDir: URL;
      root: URL;
      logger?: AstroIntegrationLogger;
      output: AstroConfig['output'];
      seedHandler: SeedHandler;
    }
  | {
      connectToStudio: true;
      tables: LateTables;
      appToken: string;
      srcDir: URL;
      root: URL;
      output: AstroConfig['output'];
      seedHandler: SeedHandler;
    };
+
/**
 * Vite plugin that provides the `astro:db` virtual module.
 *
 * Local mode drops/recreates tables and runs seed files each time the module
 * is loaded; remote (Studio) mode emits a client for the remote database.
 */
export function vitePluginDb(params: VitePluginDBParams): VitePlugin {
  let command: 'build' | 'serve' = 'build';
  return {
    name: 'astro:db',
    enforce: 'pre',
    configResolved(resolvedConfig) {
      command = resolvedConfig.command;
    },
    async resolveId(id) {
      if (id !== VIRTUAL_MODULE_ID) return;
      // While a seed file is running, hand back a distinct module id so its
      // own `astro:db` import does not re-enter the seeding path below.
      if (params.seedHandler.inProgress) {
        return resolved.importedFromSeedFile;
      }
      return resolved.module;
    },
    async load(id) {
      if (id !== resolved.module && id !== resolved.importedFromSeedFile) return;

      if (params.connectToStudio) {
        return getStudioVirtualModContents({
          appToken: params.appToken,
          tables: params.tables.get(),
          isBuild: command === 'build',
          output: params.output,
        });
      }

      // When seeding, we resolved to a different virtual module.
      // this prevents an infinite loop attempting to rerun seed files.
      // Short circuit with the module contents in this case.
      if (id === resolved.importedFromSeedFile) {
        return getLocalVirtualModContents({
          root: params.root,
          tables: params.tables.get(),
        });
      }

      // Fresh local load: reset tables, then run every existing seed file.
      await recreateTables(params);
      const seedFiles = getResolvedSeedFiles(params);
      // NOTE(review): `for await` over a plain URL[]; a regular `for..of`
      // would behave identically here.
      for await (const seedFile of seedFiles) {
        // Use `addWatchFile()` to invalidate the `astro:db` module
        // when a seed file changes.
        this.addWatchFile(fileURLToPath(seedFile));
        if (existsSync(seedFile)) {
          params.seedHandler.inProgress = true;
          await params.seedHandler.execute(seedFile);
        }
      }
      if (params.seedHandler.inProgress) {
        // Fall back to console when no integration logger was provided.
        (params.logger ?? console).info('Seeded database.');
        params.seedHandler.inProgress = false;
      }
      return getLocalVirtualModContents({
        root: params.root,
        tables: params.tables.get(),
      });
    },
  };
}
+
+export function getConfigVirtualModContents() {
+ return `export * from ${RUNTIME_VIRTUAL_IMPORT}`;
+}
+
+export function getLocalVirtualModContents({
+ tables,
+ root,
+}: {
+ tables: DBTables;
+ root: URL;
+}) {
+ const { ASTRO_DATABASE_FILE } = getAstroEnv();
+ const dbInfo = getRemoteDatabaseInfo();
+ const dbUrl = new URL(DB_PATH, root);
+ return `
+import { asDrizzleTable, createLocalDatabaseClient, normalizeDatabaseUrl } from ${RUNTIME_IMPORT};
+
+const dbUrl = normalizeDatabaseUrl(${JSON.stringify(ASTRO_DATABASE_FILE)}, ${JSON.stringify(dbUrl)});
+export const db = createLocalDatabaseClient({ dbUrl, enableTransactions: ${dbInfo.url === 'libsql'} });
+
+export * from ${RUNTIME_VIRTUAL_IMPORT};
+
+${getStringifiedTableExports(tables)}`;
+}
+
/**
 * Generate `astro:db` module contents for a remote (Studio or libSQL)
 * database. How the generated module obtains its app token and remote URL
 * depends on dev vs. build and on static vs. server output — see the inner
 * helpers.
 */
export function getStudioVirtualModContents({
  tables,
  appToken,
  isBuild,
  output,
}: {
  tables: DBTables;
  appToken: string;
  isBuild: boolean;
  output: AstroConfig['output'];
}) {
  const dbInfo = getRemoteDatabaseInfo();

  // Expression (as source text) the generated module uses for the app token.
  function appTokenArg() {
    if (isBuild) {
      const envPrefix = dbInfo.type === 'studio' ? 'ASTRO_STUDIO' : 'ASTRO_DB';
      if (output === 'server') {
        // In production build, always read the runtime environment variable.
        return `process.env.${envPrefix}_APP_TOKEN`;
      } else {
        // Static mode or prerendering needs the local app token.
        return `process.env.${envPrefix}_APP_TOKEN ?? ${JSON.stringify(appToken)}`;
      }
    } else {
      // Dev: inline the token that was resolved at config time.
      return JSON.stringify(appToken);
    }
  }

  // Expression (as source text) the generated module uses for the remote URL.
  function dbUrlArg() {
    const dbStr = JSON.stringify(dbInfo.url);

    if (isBuild) {
      // Allow overriding, mostly for testing
      return dbInfo.type === 'studio'
        ? `import.meta.env.ASTRO_STUDIO_REMOTE_DB_URL ?? ${dbStr}`
        : `import.meta.env.ASTRO_DB_REMOTE_URL ?? ${dbStr}`;
    } else {
      return dbStr;
    }
  }

  return `
import {asDrizzleTable, createRemoteDatabaseClient} from ${RUNTIME_IMPORT};

export const db = await createRemoteDatabaseClient({
  dbType: ${JSON.stringify(dbInfo.type)},
  remoteUrl: ${dbUrlArg()},
  appToken: ${appTokenArg()},
});

export * from ${RUNTIME_VIRTUAL_IMPORT};

${getStringifiedTableExports(tables)}
	`;
}
+
+function getStringifiedTableExports(tables: DBTables) {
+ return Object.entries(tables)
+ .map(
+ ([name, table]) =>
+ `export const ${name} = asDrizzleTable(${JSON.stringify(name)}, ${JSON.stringify(
+ table,
+ )}, false)`,
+ )
+ .join('\n');
+}
+
+const sqlite = new SQLiteAsyncDialect();
+
/**
 * Reset the local database: drop and recreate every configured table along
 * with its indexes. All statements run as one batch with foreign keys
 * deferred, so drop/create ordering does not matter.
 */
async function recreateTables({ tables, root }: { tables: LateTables; root: URL }) {
  const dbInfo = getRemoteDatabaseInfo();
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const dbUrl = normalizeDatabaseUrl(ASTRO_DATABASE_FILE, new URL(DB_PATH, root).href);
  const db = createLocalDatabaseClient({ dbUrl, enableTransactions: dbInfo.type === 'libsql' });
  const setupQueries: SQL[] = [];
  for (const [name, table] of Object.entries(tables.get() ?? {})) {
    const dropQuery = sql.raw(`DROP TABLE IF EXISTS ${sqlite.escapeName(name)}`);
    const createQuery = sql.raw(getCreateTableQuery(name, table));
    const indexQueries = getCreateIndexQueries(name, table);
    setupQueries.push(dropQuery, createQuery, ...indexQueries.map((s) => sql.raw(s)));
  }
  await db.batch([
    db.run(sql`pragma defer_foreign_keys=true;`),
    ...setupQueries.map((q) => db.run(q)),
  ]);
}
+
+function getResolvedSeedFiles({
+ root,
+ seedFiles,
+}: {
+ root: URL;
+ seedFiles: LateSeedFiles;
+}) {
+ const localSeedFiles = SEED_DEV_FILE_NAME.map((name) => new URL(name, getDbDirectoryUrl(root)));
+ const integrationSeedFiles = seedFiles.get().map((s) => getResolvedFileUrl(root, s));
+ return [...integrationSeedFiles, ...localSeedFiles];
+}
diff --git a/packages/db/src/core/load-file.ts b/packages/db/src/core/load-file.ts
new file mode 100644
index 000000000..027deaa60
--- /dev/null
+++ b/packages/db/src/core/load-file.ts
@@ -0,0 +1,206 @@
+import { existsSync } from 'node:fs';
+import { unlink, writeFile } from 'node:fs/promises';
+import { createRequire } from 'node:module';
+import { fileURLToPath, pathToFileURL } from 'node:url';
+import type { AstroConfig } from 'astro';
+import { build as esbuild } from 'esbuild';
+import { CONFIG_FILE_NAMES, VIRTUAL_MODULE_ID } from './consts.js';
+import { INTEGRATION_TABLE_CONFLICT_ERROR } from './errors.js';
+import { errorMap } from './integration/error-map.js';
+import { getConfigVirtualModContents } from './integration/vite-plugin-db.js';
+import { dbConfigSchema } from './schemas.js';
+import './types.js';
+import { getAstroEnv, getDbDirectoryUrl } from './utils.js';
+
/**
 * Load a user's `astro:db` configuration file and additional configuration
 * files provided by integrations, merging all tables into one config.
 *
 * @throws when an integration table name collides with a user-defined table
 *   or with another integration's table.
 */
export async function resolveDbConfig({
  root,
  integrations,
}: Pick<AstroConfig, 'root' | 'integrations'>) {
  const { mod, dependencies } = await loadUserConfigFile(root);
  const userDbConfig = dbConfigSchema.parse(mod?.default ?? {}, { errorMap });
  /** Resolved `astro:db` config including tables provided by integrations. */
  const dbConfig = { tables: userDbConfig.tables ?? {} };

  // Collect additional config and seed files from integrations.
  const integrationDbConfigPaths: Array<{ name: string; configEntrypoint: string | URL }> = [];
  const integrationSeedPaths: Array<string | URL> = [];
  for (const integration of integrations) {
    const { name, hooks } = integration;
    if (hooks['astro:db:setup']) {
      // NOTE(review): hook invoked without awaiting its return value — if an
      // integration implements this hook as async, its extendDb() calls could
      // land after collection. Confirm the hook is sync by contract.
      hooks['astro:db:setup']({
        extendDb({ configEntrypoint, seedEntrypoint }) {
          if (configEntrypoint) {
            integrationDbConfigPaths.push({ name, configEntrypoint });
          }
          if (seedEntrypoint) {
            integrationSeedPaths.push(seedEntrypoint);
          }
        },
      });
    }
  }
  for (const { name, configEntrypoint } of integrationDbConfigPaths) {
    // TODO: config file dependencies are not tracked for integrations for now.
    const loadedConfig = await loadIntegrationConfigFile(root, configEntrypoint);
    const integrationDbConfig = dbConfigSchema.parse(loadedConfig.mod?.default ?? {}, {
      errorMap,
    });
    for (const key in integrationDbConfig.tables) {
      if (key in dbConfig.tables) {
        // Distinguish user-vs-integration conflicts for a clearer message.
        const isUserConflict = key in (userDbConfig.tables ?? {});
        throw new Error(INTEGRATION_TABLE_CONFLICT_ERROR(name, key, isUserConflict));
      } else {
        dbConfig.tables[key] = integrationDbConfig.tables[key];
      }
    }
  }

  return {
    /** Resolved `astro:db` config, including tables added by integrations. */
    dbConfig,
    /** Dependencies imported into the user config file. */
    dependencies,
    /** Additional `astro:db` seed file paths provided by integrations. */
    integrationSeedPaths,
  };
}
+
+async function loadUserConfigFile(
+ root: URL,
+): Promise<{ mod: { default?: unknown } | undefined; dependencies: string[] }> {
+ let configFileUrl: URL | undefined;
+ for (const fileName of CONFIG_FILE_NAMES) {
+ const fileUrl = new URL(fileName, getDbDirectoryUrl(root));
+ if (existsSync(fileUrl)) {
+ configFileUrl = fileUrl;
+ }
+ }
+ return await loadAndBundleDbConfigFile({ root, fileUrl: configFileUrl });
+}
+
+export function getResolvedFileUrl(root: URL, filePathOrUrl: string | URL): URL {
+ if (typeof filePathOrUrl === 'string') {
+ const { resolve } = createRequire(root);
+ const resolvedFilePath = resolve(filePathOrUrl);
+ return pathToFileURL(resolvedFilePath);
+ }
+ return filePathOrUrl;
+}
+
+async function loadIntegrationConfigFile(root: URL, filePathOrUrl: string | URL) {
+ const fileUrl = getResolvedFileUrl(root, filePathOrUrl);
+ return await loadAndBundleDbConfigFile({ root, fileUrl });
+}
+
+async function loadAndBundleDbConfigFile({
+ root,
+ fileUrl,
+}: {
+ root: URL;
+ fileUrl: URL | undefined;
+}): Promise<{ mod: { default?: unknown } | undefined; dependencies: string[] }> {
+ if (!fileUrl) {
+ return { mod: undefined, dependencies: [] };
+ }
+ const { code, dependencies } = await bundleFile({
+ virtualModContents: getConfigVirtualModContents(),
+ root,
+ fileUrl,
+ });
+ return {
+ mod: await importBundledFile({ code, root }),
+ dependencies,
+ };
+}
+
+/**
+ * Bundle arbitrary `mjs` or `ts` file.
+ * Simplified fork from Vite's `bundleConfigFile` function.
+ *
+ * @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L961
+ */
+export async function bundleFile({
+ fileUrl,
+ root,
+ virtualModContents,
+}: {
+ fileUrl: URL;
+ root: URL;
+ virtualModContents: string;
+}) {
+ const { ASTRO_DATABASE_FILE } = getAstroEnv();
+ const result = await esbuild({
+ absWorkingDir: process.cwd(),
+ entryPoints: [fileURLToPath(fileUrl)],
+ outfile: 'out.js',
+ packages: 'external',
+ write: false,
+ target: ['node16'],
+ platform: 'node',
+ bundle: true,
+ format: 'esm',
+ sourcemap: 'inline',
+ metafile: true,
+ define: {
+ 'import.meta.env.ASTRO_STUDIO_REMOTE_DB_URL': 'undefined',
+ 'import.meta.env.ASTRO_DB_REMOTE_DB_URL': 'undefined',
+ 'import.meta.env.ASTRO_DATABASE_FILE': JSON.stringify(ASTRO_DATABASE_FILE ?? ''),
+ },
+ plugins: [
+ {
+ name: 'resolve-astro-db',
+ setup(build) {
+ build.onResolve({ filter: /^astro:db$/ }, ({ path }) => {
+ return { path, namespace: VIRTUAL_MODULE_ID };
+ });
+ build.onLoad({ namespace: VIRTUAL_MODULE_ID, filter: /.*/ }, () => {
+ return {
+ contents: virtualModContents,
+ // Needed to resolve runtime dependencies
+ resolveDir: fileURLToPath(root),
+ };
+ });
+ },
+ },
+ ],
+ });
+
+ const file = result.outputFiles[0];
+ if (!file) {
+ throw new Error(`Unexpected: no output file`);
+ }
+
+ return {
+ code: file.text,
+ dependencies: Object.keys(result.metafile.inputs),
+ };
+}
+
/**
 * Forked from Vite config loader, replacing CJS-based path concat with ESM only
 *
 * Writes the bundled code to a temp `.mjs` file inside the project root,
 * imports it with native Node ESM, and always removes the file afterwards.
 *
 * @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L1074
 */
export async function importBundledFile({
  code,
  root,
}: {
  code: string;
  root: URL;
}): Promise<{ default?: unknown }> {
  // Write it to disk, load it with native Node ESM, then delete the file.
  // The timestamp suffix avoids collisions between near-simultaneous loads.
  const tmpFileUrl = new URL(`./db.timestamp-${Date.now()}.mjs`, root);
  await writeFile(tmpFileUrl, code, { encoding: 'utf8' });
  try {
    return await import(/* @vite-ignore */ tmpFileUrl.toString());
  } finally {
    try {
      await unlink(tmpFileUrl);
    } catch {
      // already removed if this function is called twice simultaneously
    }
  }
}
diff --git a/packages/db/src/core/queries.ts b/packages/db/src/core/queries.ts
new file mode 100644
index 000000000..e77578ac2
--- /dev/null
+++ b/packages/db/src/core/queries.ts
@@ -0,0 +1,206 @@
+import type { SQL } from 'drizzle-orm';
+import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
+import { bold } from 'kleur/colors';
+import {
+ FOREIGN_KEY_DNE_ERROR,
+ FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
+ FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
+ REFERENCE_DNE_ERROR,
+} from '../runtime/errors.js';
+import { hasPrimaryKey } from '../runtime/index.js';
+import { isSerializedSQL } from '../runtime/types.js';
+import type {
+ BooleanColumn,
+ ColumnType,
+ DBColumn,
+ DBTable,
+ DateColumn,
+ JsonColumn,
+ NumberColumn,
+ TextColumn,
+} from './types.js';
+
// Shared SQLite dialect used for identifier/string escaping in the query
// builders below.
const sqlite = new SQLiteAsyncDialect();

// Seed file names looked up in the project's db directory.
export const SEED_DEV_FILE_NAME = ['seed.ts', 'seed.js', 'seed.mjs', 'seed.mts'];
+
+export function getDropTableIfExistsQuery(tableName: string) {
+ return `DROP TABLE IF EXISTS ${sqlite.escapeName(tableName)}`;
+}
+
+export function getCreateTableQuery(tableName: string, table: DBTable) {
+ let query = `CREATE TABLE ${sqlite.escapeName(tableName)} (`;
+
+ const colQueries = [];
+ const colHasPrimaryKey = Object.entries(table.columns).find(([, column]) =>
+ hasPrimaryKey(column),
+ );
+ if (!colHasPrimaryKey) {
+ colQueries.push('_id INTEGER PRIMARY KEY');
+ }
+ for (const [columnName, column] of Object.entries(table.columns)) {
+ const colQuery = `${sqlite.escapeName(columnName)} ${schemaTypeToSqlType(
+ column.type,
+ )}${getModifiers(columnName, column)}`;
+ colQueries.push(colQuery);
+ }
+
+ colQueries.push(...getCreateForeignKeyQueries(tableName, table));
+
+ query += colQueries.join(', ') + ')';
+ return query;
+}
+
+export function getCreateIndexQueries(tableName: string, table: Pick<DBTable, 'indexes'>) {
+ let queries: string[] = [];
+ for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
+ const onColNames = asArray(indexProps.on);
+ const onCols = onColNames.map((colName) => sqlite.escapeName(colName));
+
+ const unique = indexProps.unique ? 'UNIQUE ' : '';
+ const indexQuery = `CREATE ${unique}INDEX ${sqlite.escapeName(
+ indexName,
+ )} ON ${sqlite.escapeName(tableName)} (${onCols.join(', ')})`;
+ queries.push(indexQuery);
+ }
+ return queries;
+}
+
/**
 * Build `FOREIGN KEY (...) REFERENCES ...` clauses for a table definition.
 *
 * @throws when columns/references lengths mismatch, when references is
 *   empty, or when a referenced column is not attached to a table.
 */
export function getCreateForeignKeyQueries(tableName: string, table: DBTable) {
  let queries: string[] = [];
  for (const foreignKey of table.foreignKeys ?? []) {
    const columns = asArray(foreignKey.columns);
    const references = asArray(foreignKey.references);

    if (columns.length !== references.length) {
      throw new Error(FOREIGN_KEY_REFERENCES_LENGTH_ERROR(tableName));
    }
    const firstReference = references[0];
    if (!firstReference) {
      throw new Error(FOREIGN_KEY_REFERENCES_EMPTY_ERROR(tableName));
    }
    // The referenced table is taken from the first reference's `collection`;
    // all references in one FK are expected to target the same table.
    const referencedTable = firstReference.schema.collection;
    if (!referencedTable) {
      throw new Error(FOREIGN_KEY_DNE_ERROR(tableName));
    }
    const query = `FOREIGN KEY (${columns
      .map((f) => sqlite.escapeName(f))
      .join(', ')}) REFERENCES ${sqlite.escapeName(referencedTable)}(${references
      .map((r) => sqlite.escapeName(r.schema.name!))
      .join(', ')})`;
    queries.push(query);
  }
  return queries;
}
+
+function asArray<T>(value: T | T[]) {
+ return Array.isArray(value) ? value : [value];
+}
+
+export function schemaTypeToSqlType(type: ColumnType): 'text' | 'integer' {
+ switch (type) {
+ case 'date':
+ case 'text':
+ case 'json':
+ return 'text';
+ case 'number':
+ case 'boolean':
+ return 'integer';
+ }
+}
+
/**
 * Column modifier clause (PRIMARY KEY / NOT NULL / UNIQUE / DEFAULT /
 * REFERENCES) for a CREATE TABLE statement. Returned string includes a
 * leading space, or is empty when no modifiers apply.
 *
 * @throws when a reference is missing its resolved table or column name.
 */
export function getModifiers(columnName: string, column: DBColumn) {
  let modifiers = '';
  // A primary key column gets no further modifiers.
  if (hasPrimaryKey(column)) {
    return ' PRIMARY KEY';
  }
  if (!column.schema.optional) {
    modifiers += ' NOT NULL';
  }
  if (column.schema.unique) {
    modifiers += ' UNIQUE';
  }
  if (hasDefault(column)) {
    modifiers += ` DEFAULT ${getDefaultValueSql(columnName, column)}`;
  }
  const references = getReferencesConfig(column);
  if (references) {
    // `collection`/`name` are filled in by defineDb(); missing values mean
    // the reference never got resolved.
    const { collection: tableName, name } = references.schema;
    if (!tableName || !name) {
      throw new Error(REFERENCE_DNE_ERROR(columnName));
    }

    modifiers += ` REFERENCES ${sqlite.escapeName(tableName)} (${sqlite.escapeName(name)})`;
  }
  return modifiers;
}
+
+export function getReferencesConfig(column: DBColumn) {
+ const canHaveReferences = column.type === 'number' || column.type === 'text';
+ if (!canHaveReferences) return undefined;
+ return column.schema.references;
+}
+
+// Using `DBColumn` will not narrow `default` based on the column `type`
+// Handle each column separately
+type WithDefaultDefined<T extends DBColumn> = T & {
+ schema: Required<Pick<T['schema'], 'default'>>;
+};
+type DBColumnWithDefault =
+ | WithDefaultDefined<TextColumn>
+ | WithDefaultDefined<DateColumn>
+ | WithDefaultDefined<NumberColumn>
+ | WithDefaultDefined<BooleanColumn>
+ | WithDefaultDefined<JsonColumn>;
+
+// Type narrowing the default fails on union types, so use a type guard
+export function hasDefault(column: DBColumn): column is DBColumnWithDefault {
+ if (column.schema.default !== undefined) {
+ return true;
+ }
+ if (hasPrimaryKey(column) && column.type === 'number') {
+ return true;
+ }
+ return false;
+}
+
+function toDefault<T>(def: T | SQL<any>): string {
+ const type = typeof def;
+ if (type === 'string') {
+ return sqlite.escapeString(def as string);
+ } else if (type === 'boolean') {
+ return def ? 'TRUE' : 'FALSE';
+ } else {
+ return def + '';
+ }
+}
+
+function getDefaultValueSql(columnName: string, column: DBColumnWithDefault): string {
+ if (isSerializedSQL(column.schema.default)) {
+ return column.schema.default.sql;
+ }
+
+ switch (column.type) {
+ case 'boolean':
+ case 'number':
+ case 'text':
+ case 'date':
+ return toDefault(column.schema.default);
+ case 'json': {
+ let stringified = '';
+ try {
+ stringified = JSON.stringify(column.schema.default);
+ } catch {
+ // biome-ignore lint/suspicious/noConsoleLog: allowed
+ console.log(
+ `Invalid default value for column ${bold(
+ columnName,
+ )}. Defaults must be valid JSON when using the \`json()\` type.`,
+ );
+ process.exit(0);
+ }
+
+ return sqlite.escapeString(stringified);
+ }
+ }
+}
diff --git a/packages/db/src/core/schemas.ts b/packages/db/src/core/schemas.ts
new file mode 100644
index 000000000..c9575a79a
--- /dev/null
+++ b/packages/db/src/core/schemas.ts
@@ -0,0 +1,247 @@
+import { SQL } from 'drizzle-orm';
+import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
+import { type ZodTypeDef, z } from 'zod';
+import { SERIALIZED_SQL_KEY, type SerializedSQL } from '../runtime/types.js';
+import { errorMap } from './integration/error-map.js';
+import type { NumberColumn, TextColumn } from './types.js';
+import { mapObject } from './utils.js';
+
export type MaybeArray<T> = T | T[];

// Transform to serializable object for migration files
const sqlite = new SQLiteAsyncDialect();

// Wraps a drizzle `SQL` expression (e.g. `NOW`) and serializes it to a plain
// `{ [SERIALIZED_SQL_KEY]: true, sql }` object so defaults survive the JSON
// round-trip into migration snapshots.
const sqlSchema = z.instanceof(SQL<any>).transform(
	(sqlObj): SerializedSQL => ({
		[SERIALIZED_SQL_KEY]: true,
		sql: sqlite.sqlToQuery(sqlObj).sql,
	}),
);

// Options shared by every column type.
const baseColumnSchema = z.object({
	label: z.string().optional(),
	optional: z.boolean().optional().default(false),
	unique: z.boolean().optional().default(false),
	deprecated: z.boolean().optional().default(false),

	// Defined when `defineDb()` is called to resolve `references`
	name: z.string().optional(),
	// TODO: Update to `table`. Will need migration file version change
	collection: z.string().optional(),
});
+
export const booleanColumnSchema = z.object({
	type: z.literal('boolean'),
	schema: baseColumnSchema.extend({
		default: z.union([z.boolean(), sqlSchema]).optional(),
	}),
});

// Number columns accept different options depending on `primaryKey`,
// so the base schema is a union of the two shapes.
const numberColumnBaseSchema = baseColumnSchema.omit({ optional: true }).and(
	z.union([
		z.object({
			primaryKey: z.literal(false).optional().default(false),
			optional: baseColumnSchema.shape.optional,
			default: z.union([z.number(), sqlSchema]).optional(),
		}),
		z.object({
			// `integer primary key` uses ROWID as the default value.
			// `optional` and `default` do not have an effect,
			// so disable these config options for primary keys.
			primaryKey: z.literal(true),
			optional: z.literal(false).optional(),
			default: z.literal(undefined).optional(),
		}),
	]),
);

// `references` is declared as a function returning the referenced column to
// break definition-order cycles; the transform calls it eagerly so the parsed
// output is a plain object.
export const numberColumnOptsSchema: z.ZodType<
	z.infer<typeof numberColumnBaseSchema> & {
		// ReferenceableColumn creates a circular type. Define ZodType to resolve.
		references?: NumberColumn;
	},
	ZodTypeDef,
	z.input<typeof numberColumnBaseSchema> & {
		references?: () => z.input<typeof numberColumnSchema>;
	}
> = numberColumnBaseSchema.and(
	z.object({
		references: z
			.function()
			.returns(z.lazy(() => numberColumnSchema))
			.optional()
			.transform((fn) => fn?.()),
	}),
);

export const numberColumnSchema = z.object({
	type: z.literal('number'),
	schema: numberColumnOptsSchema,
});
+
// Text columns accept different options depending on `primaryKey`,
// so the base schema is a union of the two shapes.
const textColumnBaseSchema = baseColumnSchema
	.omit({ optional: true })
	.extend({
		default: z.union([z.string(), sqlSchema]).optional(),
		multiline: z.boolean().optional(),
	})
	.and(
		z.union([
			z.object({
				primaryKey: z.literal(false).optional().default(false),
				optional: baseColumnSchema.shape.optional,
			}),
			z.object({
				// text primary key allows NULL values.
				// NULL values bypass unique checks, which could
				// lead to duplicate URLs per record in Astro Studio.
				// disable `optional` for primary keys.
				primaryKey: z.literal(true),
				optional: z.literal(false).optional(),
			}),
		]),
	);

// Same pattern as `numberColumnOptsSchema`: a lazy, eagerly-invoked
// `references` function breaks the circular type.
export const textColumnOptsSchema: z.ZodType<
	z.infer<typeof textColumnBaseSchema> & {
		// ReferenceableColumn creates a circular type. Define ZodType to resolve.
		references?: TextColumn;
	},
	ZodTypeDef,
	z.input<typeof textColumnBaseSchema> & {
		references?: () => z.input<typeof textColumnSchema>;
	}
> = textColumnBaseSchema.and(
	z.object({
		references: z
			.function()
			.returns(z.lazy(() => textColumnSchema))
			.optional()
			.transform((fn) => fn?.()),
	}),
);

export const textColumnSchema = z.object({
	type: z.literal('text'),
	schema: textColumnOptsSchema,
});

export const dateColumnSchema = z.object({
	type: z.literal('date'),
	schema: baseColumnSchema.extend({
		default: z
			.union([
				sqlSchema,
				// transform to ISO string for serialization
				z
					.date()
					.transform((d) => d.toISOString()),
			])
			.optional(),
	}),
});

// JSON defaults are validated for serializability later, when migration
// queries are generated.
export const jsonColumnSchema = z.object({
	type: z.literal('json'),
	schema: baseColumnSchema.extend({
		default: z.unknown().optional(),
	}),
});
+
// Discriminate on `type` for fast parsing and precise error messages.
export const columnSchema = z.discriminatedUnion('type', [
	booleanColumnSchema,
	numberColumnSchema,
	textColumnSchema,
	dateColumnSchema,
	jsonColumnSchema,
]);
// Only text and number columns may be targets of `references`/foreign keys.
export const referenceableColumnSchema = z.union([textColumnSchema, numberColumnSchema]);

export const columnsSchema = z.record(columnSchema);

type ForeignKeysInput = {
	columns: MaybeArray<string>;
	references: () => MaybeArray<Omit<z.input<typeof referenceableColumnSchema>, 'references'>>;
};

type ForeignKeysOutput = Omit<ForeignKeysInput, 'references'> & {
	// reference fn called in `transform`. Ensures output is JSON serializable.
	references: MaybeArray<Omit<z.output<typeof referenceableColumnSchema>, 'references'>>;
};

const foreignKeysSchema: z.ZodType<ForeignKeysOutput, ZodTypeDef, ForeignKeysInput> = z.object({
	columns: z.string().or(z.array(z.string())),
	references: z
		.function()
		.returns(z.lazy(() => referenceableColumnSchema.or(z.array(referenceableColumnSchema))))
		.transform((fn) => fn()),
});
+
// Index shape stored in migration snapshots; the index name is the record key.
export const resolvedIndexSchema = z.object({
	on: z.string().or(z.array(z.string())),
	unique: z.boolean().optional(),
});
/** @deprecated */
const legacyIndexesSchema = z.record(resolvedIndexSchema);

// Array-form index config; `name` is optional and generated when omitted
// (see `dbConfigSchema`'s transform).
export const indexSchema = z.object({
	on: z.string().or(z.array(z.string())),
	unique: z.boolean().optional(),
	name: z.string().optional(),
});
const indexesSchema = z.array(indexSchema);

export const tableSchema = z.object({
	columns: columnsSchema,
	indexes: indexesSchema.or(legacyIndexesSchema).optional(),
	foreignKeys: z.array(foreignKeysSchema).optional(),
	deprecated: z.boolean().optional().default(false),
});
+
// Pre-process step (runs before validation): annotate each table and column
// with its own name so `references` resolution and error messages can point
// back at them. NOTE: mutates `rawTables` in place on purpose, then hands the
// same object to `z.record(tableSchema)` for real validation.
export const tablesSchema = z.preprocess((rawTables) => {
	// Use `z.any()` to avoid breaking object references
	const tables = z.record(z.any()).parse(rawTables, { errorMap });
	for (const [tableName, table] of Object.entries(tables)) {
		// Append table and column names to columns.
		// Used to track table info for references.
		table.getName = () => tableName;
		const { columns } = z.object({ columns: z.record(z.any()) }).parse(table, { errorMap });
		for (const [columnName, column] of Object.entries(columns)) {
			column.schema.name = columnName;
			column.schema.collection = tableName;
		}
	}
	return rawTables;
}, z.record(tableSchema));
+
+export const dbConfigSchema = z
+ .object({
+ tables: tablesSchema.optional(),
+ })
+ .transform(({ tables = {}, ...config }) => {
+ return {
+ ...config,
+ tables: mapObject(tables, (tableName, table) => {
+ const { indexes = {} } = table;
+ if (!Array.isArray(indexes)) {
+ return { ...table, indexes };
+ }
+ const resolvedIndexes: Record<string, z.infer<typeof resolvedIndexSchema>> = {};
+ for (const index of indexes) {
+ if (index.name) {
+ const { name, ...rest } = index;
+ resolvedIndexes[index.name] = rest;
+ continue;
+ }
+ // Sort index columns to ensure consistent index names
+ const indexOn = Array.isArray(index.on) ? index.on.sort().join('_') : index.on;
+ const name = tableName + '_' + indexOn + '_idx';
+ resolvedIndexes[name] = index;
+ }
+ return {
+ ...table,
+ indexes: resolvedIndexes,
+ };
+ }),
+ };
+ });
diff --git a/packages/db/src/core/types.ts b/packages/db/src/core/types.ts
new file mode 100644
index 000000000..5efc6507c
--- /dev/null
+++ b/packages/db/src/core/types.ts
@@ -0,0 +1,102 @@
+import type { z } from 'zod';
+import type {
+ MaybeArray,
+ booleanColumnSchema,
+ columnSchema,
+ columnsSchema,
+ dateColumnSchema,
+ dbConfigSchema,
+ indexSchema,
+ jsonColumnSchema,
+ numberColumnOptsSchema,
+ numberColumnSchema,
+ referenceableColumnSchema,
+ resolvedIndexSchema,
+ tableSchema,
+ textColumnOptsSchema,
+ textColumnSchema,
+} from './schemas.js';
+
// Aliases over the zod schemas in `./schemas.js`, so the rest of the package
// never re-derives `z.infer`/`z.input` types inline.
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
export type NumberColumn = z.infer<typeof numberColumnSchema>;
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
export type TextColumn = z.infer<typeof textColumnSchema>;
export type TextColumnInput = z.input<typeof textColumnSchema>;
export type DateColumn = z.infer<typeof dateColumnSchema>;
export type DateColumnInput = z.input<typeof dateColumnSchema>;
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;

export type ColumnType =
	| BooleanColumn['type']
	| NumberColumn['type']
	| TextColumn['type']
	| DateColumn['type']
	| JsonColumn['type'];

export type DBColumn = z.infer<typeof columnSchema>;
export type DBColumnInput =
	| DateColumnInput
	| BooleanColumnInput
	| NumberColumnInput
	| TextColumnInput
	| JsonColumnInput;
export type DBColumns = z.infer<typeof columnsSchema>;
export type DBTable = z.infer<typeof tableSchema>;
export type DBTables = Record<string, DBTable>;
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
// Shape of a migration snapshot file: resolved tables plus snapshot format version.
export type DBSnapshot = {
	schema: Record<string, ResolvedDBTable>;
	version: string;
};

export type DBConfigInput = z.input<typeof dbConfigSchema>;
export type DBConfig = z.infer<typeof dbConfigSchema>;

export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
+
// User-facing table config, as passed to `defineTable()`. Generics keep key
// completions for `indexes.on` and `foreignKeys.columns`.
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig>
	// use `extends` to ensure types line up with zod,
	// only adding generics for type completions.
	extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
	columns: TColumns;
	foreignKeys?: Array<{
		columns: MaybeArray<Extract<keyof TColumns, string>>;
		references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
	}>;
	indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
	deprecated?: boolean;
}

// Array-form index config with `on` narrowed to this table's column names.
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
	on: MaybeArray<Extract<keyof TColumns, string>>;
}

/** @deprecated */
interface LegacyIndexConfig<TColumns extends ColumnsConfig>
	extends z.input<typeof resolvedIndexSchema> {
	on: MaybeArray<Extract<keyof TColumns, string>>;
}

// We cannot use `Omit<NumberColumn | TextColumn, 'type'>`,
// since Omit collapses our union type on primary key.
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
+
declare global {
	// eslint-disable-next-line @typescript-eslint/no-namespace
	namespace Astro {
		export interface IntegrationHooks {
			// Hook for third-party integrations to register extra db config and
			// seed entrypoints alongside the user's own.
			'astro:db:setup'?: (options: {
				extendDb: (options: {
					configEntrypoint?: URL | string;
					seedEntrypoint?: URL | string;
				}) => void;
			}) => void | Promise<void>;
		}
	}
}
diff --git a/packages/db/src/core/utils.ts b/packages/db/src/core/utils.ts
new file mode 100644
index 000000000..b246997e2
--- /dev/null
+++ b/packages/db/src/core/utils.ts
@@ -0,0 +1,80 @@
+import { type ManagedAppToken, getAstroStudioEnv, getManagedAppTokenOrExit } from '@astrojs/studio';
+import type { AstroConfig, AstroIntegration } from 'astro';
+import { loadEnv } from 'vite';
+import './types.js';
+
+export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
+
+export function getAstroEnv(envMode = ''): Record<`ASTRO_${string}`, string> {
+ const env = loadEnv(envMode, process.cwd(), 'ASTRO_');
+ return env;
+}
+
+export type RemoteDatabaseInfo = {
+ type: 'libsql' | 'studio';
+ url: string;
+};
+
+export function getRemoteDatabaseInfo(): RemoteDatabaseInfo {
+ const astroEnv = getAstroEnv();
+ const studioEnv = getAstroStudioEnv();
+
+ if (studioEnv.ASTRO_STUDIO_REMOTE_DB_URL)
+ return {
+ type: 'studio',
+ url: studioEnv.ASTRO_STUDIO_REMOTE_DB_URL,
+ };
+
+ if (astroEnv.ASTRO_DB_REMOTE_URL)
+ return {
+ type: 'libsql',
+ url: astroEnv.ASTRO_DB_REMOTE_URL,
+ };
+
+ return {
+ type: 'studio',
+ url: 'https://db.services.astro.build',
+ };
+}
+
+export function getManagedRemoteToken(
+ token?: string,
+ dbInfo?: RemoteDatabaseInfo,
+): Promise<ManagedAppToken> {
+ dbInfo ??= getRemoteDatabaseInfo();
+
+ if (dbInfo.type === 'studio') {
+ return getManagedAppTokenOrExit(token);
+ }
+
+ const astroEnv = getAstroEnv();
+
+ return Promise.resolve({
+ token: token ?? astroEnv.ASTRO_DB_APP_TOKEN,
+ renew: () => Promise.resolve(),
+ destroy: () => Promise.resolve(),
+ });
+}
+
/** URL of the `db/` directory (config, seed files) under the project root. */
export function getDbDirectoryUrl(root: URL | string) {
	return new URL('db/', root);
}

/** Identity helper giving integration authors type checking for `astro:db:setup`. */
export function defineDbIntegration(integration: AstroIntegration): AstroIntegration {
	return integration;
}
+
+export type Result<T> = { success: true; data: T } | { success: false; data: unknown };
+
+/**
+ * Map an object's values to a new set of values
+ * while preserving types.
+ */
+export function mapObject<T, U = T>(
+ item: Record<string, T>,
+ callback: (key: string, value: T) => U,
+): Record<string, U> {
+ return Object.fromEntries(
+ Object.entries(item).map(([key, value]) => [key, callback(key, value)]),
+ );
+}
diff --git a/packages/db/src/index.ts b/packages/db/src/index.ts
new file mode 100644
index 000000000..f7022a24a
--- /dev/null
+++ b/packages/db/src/index.ts
@@ -0,0 +1,3 @@
+export type { TableConfig } from './core/types.js';
+export { cli } from './core/cli/index.js';
+export { integration as default } from './core/integration/index.js';
diff --git a/packages/db/src/runtime/db-client.ts b/packages/db/src/runtime/db-client.ts
new file mode 100644
index 000000000..21f45aa45
--- /dev/null
+++ b/packages/db/src/runtime/db-client.ts
@@ -0,0 +1,254 @@
+import type { InStatement } from '@libsql/client';
+import { type Config as LibSQLConfig, createClient } from '@libsql/client';
+import type { LibSQLDatabase } from 'drizzle-orm/libsql';
+import { drizzle as drizzleLibsql } from 'drizzle-orm/libsql';
+import { type SqliteRemoteDatabase, drizzle as drizzleProxy } from 'drizzle-orm/sqlite-proxy';
+import { z } from 'zod';
+import { DetailedLibsqlError, safeFetch } from './utils.js';
+
+const isWebContainer = !!process.versions?.webcontainer;
+
+function applyTransactionNotSupported(db: SqliteRemoteDatabase) {
+ Object.assign(db, {
+ transaction() {
+ throw new Error(
+ '`db.transaction()` is not currently supported. We recommend `db.batch()` for automatic error rollbacks across multiple queries.',
+ );
+ },
+ });
+}
+
+type LocalDbClientOptions = {
+ dbUrl: string;
+ enableTransactions: boolean;
+};
+
+export function createLocalDatabaseClient(options: LocalDbClientOptions): LibSQLDatabase {
+ const url = isWebContainer ? 'file:content.db' : options.dbUrl;
+ const client = createClient({ url });
+ const db = drizzleLibsql(client);
+
+ if (!options.enableTransactions) {
+ applyTransactionNotSupported(db);
+ }
+ return db;
+}
+
// Shape of a single statement result returned by the Studio `/db/query` endpoint.
const remoteResultSchema = z.object({
	columns: z.array(z.string()),
	columnTypes: z.array(z.string()),
	rows: z.array(z.array(z.unknown())),
	rowsAffected: z.number(),
	lastInsertRowid: z.unknown().optional(),
});

type RemoteDbClientOptions = {
	dbType: 'studio' | 'libsql';
	appToken: string;
	remoteUrl: string | URL;
};
+
+export function createRemoteDatabaseClient(options: RemoteDbClientOptions) {
+ const remoteUrl = new URL(options.remoteUrl);
+
+ return options.dbType === 'studio'
+ ? createStudioDatabaseClient(options.appToken, remoteUrl)
+ : createRemoteLibSQLClient(options.appToken, remoteUrl, options.remoteUrl.toString());
+}
+
/**
 * Create a drizzle client for a remote libSQL database.
 * Query-string parameters on the URL are forwarded to `createClient` as
 * client options, and `memory:`/`file:` URLs are rewritten into the exact
 * forms libSQL expects.
 */
function createRemoteLibSQLClient(appToken: string, remoteDbURL: URL, rawUrl: string) {
	// Everything in the query string is treated as a libSQL client option.
	const options: Partial<LibSQLConfig> = Object.fromEntries(remoteDbURL.searchParams.entries());
	remoteDbURL.search = '';

	let url = remoteDbURL.toString();
	if (remoteDbURL.protocol === 'memory:') {
		// libSQL expects a special string in place of a URL
		// for in-memory DBs.
		url = ':memory:';
	} else if (
		remoteDbURL.protocol === 'file:' &&
		remoteDbURL.pathname.startsWith('/') &&
		!rawUrl.startsWith('file:/')
	) {
		// libSQL accepts relative and absolute file URLs
		// for local DBs. This doesn't match the URL specification.
		// Parsing `file:some.db` and `file:/some.db` should yield
		// the same result, but libSQL interprets the former as
		// a relative path, and the latter as an absolute path.
		// This detects when such a conversion happened during parsing
		// and undoes it so that the URL given to libSQL is the
		// same as given by the user.
		url = 'file:' + remoteDbURL.pathname.substring(1);
	}

	const client = createClient({
		...options,
		authToken: appToken,
		url,
	});
	return drizzleLibsql(client);
}
+
+function createStudioDatabaseClient(appToken: string, remoteDbURL: URL) {
+ if (appToken == null) {
+ throw new Error(`Cannot create a remote client: missing app token.`);
+ }
+
+ const url = new URL('/db/query', remoteDbURL);
+
+ const db = drizzleProxy(
+ async (sql, parameters, method) => {
+ const requestBody: InStatement = { sql, args: parameters };
+ const res = await safeFetch(
+ url,
+ {
+ method: 'POST',
+ headers: {
+ Authorization: `Bearer ${appToken}`,
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify(requestBody),
+ },
+ async (response) => {
+ throw await parseRemoteError(response);
+ },
+ );
+
+ let remoteResult: z.infer<typeof remoteResultSchema>;
+ try {
+ const json = await res.json();
+ remoteResult = remoteResultSchema.parse(json);
+ } catch {
+ throw new DetailedLibsqlError({
+ message: await getUnexpectedResponseMessage(res),
+ code: KNOWN_ERROR_CODES.SQL_QUERY_FAILED,
+ });
+ }
+
+ if (method === 'run') {
+ const rawRows = Array.from(remoteResult.rows);
+ // Implement basic `toJSON()` for Drizzle to serialize properly
+ (remoteResult as any).rows.toJSON = () => rawRows;
+ // Using `db.run()` drizzle massages the rows into an object.
+ // So in order to make dev/prod consistent, we need to do the same here.
+ // This creates an object and loops over each row replacing it with the object.
+ for (let i = 0; i < remoteResult.rows.length; i++) {
+ let row = remoteResult.rows[i];
+ let item: Record<string, any> = {};
+ remoteResult.columns.forEach((col, index) => {
+ item[col] = row[index];
+ });
+ (remoteResult as any).rows[i] = item;
+ }
+ return remoteResult;
+ }
+
+ // Drizzle expects each row as an array of its values
+ const rowValues: unknown[][] = [];
+
+ for (const row of remoteResult.rows) {
+ if (row != null && typeof row === 'object') {
+ rowValues.push(Object.values(row));
+ }
+ }
+
+ if (method === 'get') {
+ return { rows: rowValues[0] };
+ }
+
+ return { rows: rowValues };
+ },
+ async (queries) => {
+ const stmts: InStatement[] = queries.map(({ sql, params }) => ({ sql, args: params }));
+ const res = await safeFetch(
+ url,
+ {
+ method: 'POST',
+ headers: {
+ Authorization: `Bearer ${appToken}`,
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify(stmts),
+ },
+ async (response) => {
+ throw await parseRemoteError(response);
+ },
+ );
+
+ let remoteResults: z.infer<typeof remoteResultSchema>[];
+ try {
+ const json = await res.json();
+ remoteResults = z.array(remoteResultSchema).parse(json);
+ } catch {
+ throw new DetailedLibsqlError({
+ message: await getUnexpectedResponseMessage(res),
+ code: KNOWN_ERROR_CODES.SQL_QUERY_FAILED,
+ });
+ }
+ let results: any[] = [];
+ for (const [idx, rawResult] of remoteResults.entries()) {
+ if (queries[idx]?.method === 'run') {
+ results.push(rawResult);
+ continue;
+ }
+
+ // Drizzle expects each row as an array of its values
+ const rowValues: unknown[][] = [];
+
+ for (const row of rawResult.rows) {
+ if (row != null && typeof row === 'object') {
+ rowValues.push(Object.values(row));
+ }
+ }
+
+ if (queries[idx]?.method === 'get') {
+ results.push({ rows: rowValues[0] });
+ }
+
+ results.push({ rows: rowValues });
+ }
+ return results;
+ },
+ );
+ applyTransactionNotSupported(db);
+ return db;
+}
+
// Error payload shape returned by the Studio endpoint on failure.
const errorSchema = z.object({
	success: z.boolean(),
	error: z.object({
		code: z.string(),
		details: z.string().optional(),
	}),
});

const KNOWN_ERROR_CODES = {
	SQL_QUERY_FAILED: 'SQL_QUERY_FAILED',
};

// Fallback message when the response body is not the expected JSON shape.
// `clone()` so the body stream can still be consumed elsewhere.
const getUnexpectedResponseMessage = async (response: Response) =>
	`Unexpected response from remote database:\n(Status ${response.status}) ${await response
		.clone()
		.text()}`;
+
+async function parseRemoteError(response: Response): Promise<DetailedLibsqlError> {
+ let error;
+ try {
+ error = errorSchema.parse(await response.clone().json()).error;
+ } catch {
+ return new DetailedLibsqlError({
+ message: await getUnexpectedResponseMessage(response),
+ code: KNOWN_ERROR_CODES.SQL_QUERY_FAILED,
+ });
+ }
+ // Strip LibSQL error prefixes
+ let baseDetails =
+ error.details?.replace(/.*SQLite error: /, '') ?? 'Error querying remote database.';
+ // Remove duplicated "code" in details
+ const details = baseDetails.slice(baseDetails.indexOf(':') + 1).trim();
+ let hint = `See the Astro DB guide for query and push instructions: https://docs.astro.build/en/guides/astro-db/#query-your-database`;
+ if (error.code === KNOWN_ERROR_CODES.SQL_QUERY_FAILED && details.includes('no such table')) {
+ hint = `Did you run \`astro db push\` to push your latest table schemas?`;
+ }
+ return new DetailedLibsqlError({ message: details, code: error.code, hint });
+}
diff --git a/packages/db/src/runtime/errors.ts b/packages/db/src/runtime/errors.ts
new file mode 100644
index 000000000..e15044261
--- /dev/null
+++ b/packages/db/src/runtime/errors.ts
@@ -0,0 +1,29 @@
+import { bold } from 'kleur/colors';
+
+export const FOREIGN_KEY_DNE_ERROR = (tableName: string) => {
+ return `Table ${bold(
+ tableName,
+ )} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
+};
+
+export const FOREIGN_KEY_REFERENCES_LENGTH_ERROR = (tableName: string) => {
+ return `Foreign key on ${bold(
+ tableName,
+ )} is misconfigured. \`columns\` and \`references\` must be the same length.`;
+};
+
+export const FOREIGN_KEY_REFERENCES_EMPTY_ERROR = (tableName: string) => {
+ return `Foreign key on ${bold(
+ tableName,
+ )} is misconfigured. \`references\` array cannot be empty.`;
+};
+
+export const REFERENCE_DNE_ERROR = (columnName: string) => {
+ return `Column ${bold(
+ columnName,
+ )} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
+};
+
+export const SEED_DEFAULT_EXPORT_ERROR = (fileName: string) => {
+ return `Missing default function export in ${bold(fileName)}`;
+};
diff --git a/packages/db/src/runtime/index.ts b/packages/db/src/runtime/index.ts
new file mode 100644
index 000000000..fb8579459
--- /dev/null
+++ b/packages/db/src/runtime/index.ts
@@ -0,0 +1,157 @@
+import { type ColumnBuilderBaseConfig, type ColumnDataType, sql } from 'drizzle-orm';
+import type { LibSQLDatabase } from 'drizzle-orm/libsql';
+import {
+ type IndexBuilder,
+ type SQLiteColumnBuilderBase,
+ customType,
+ index,
+ integer,
+ sqliteTable,
+ text,
+} from 'drizzle-orm/sqlite-core';
+import type { DBColumn, DBTable } from '../core/types.js';
+import { type SerializedSQL, isSerializedSQL } from './types.js';
+import { pathToFileURL } from './utils.js';
+export type Database = Omit<LibSQLDatabase, 'transaction'>;
+export type { Table } from './types.js';
+export { createRemoteDatabaseClient, createLocalDatabaseClient } from './db-client.js';
+
+export function hasPrimaryKey(column: DBColumn) {
+ return 'primaryKey' in column.schema && !!column.schema.primaryKey;
+}
+
+// Taken from:
+// https://stackoverflow.com/questions/52869695/check-if-a-date-string-is-in-iso-and-utc-format
+const isISODateString = (str: string) => /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/.test(str);
+
// Custom drizzle type: `Date` values stored as ISO-8601 text.
const dateType = customType<{ data: Date; driverData: string }>({
	dataType() {
		return 'text';
	},
	toDriver(value) {
		return value.toISOString();
	},
	fromDriver(value) {
		if (!isISODateString(value)) {
			// values saved using CURRENT_TIMESTAMP are not valid ISO strings
			// but *are* in UTC, so append the UTC zone.
			value += 'Z';
		}
		return new Date(value);
	},
});

// Custom drizzle type: arbitrary JSON stored as serialized text.
const jsonType = customType<{ data: unknown; driverData: string }>({
	dataType() {
		return 'text';
	},
	toDriver(value) {
		return JSON.stringify(value);
	},
	fromDriver(value) {
		return JSON.parse(value);
	},
});

// Loosened builder type so one record can hold builders of every column kind.
type D1ColumnBuilder = SQLiteColumnBuilderBase<
	ColumnBuilderBaseConfig<ColumnDataType, string> & { data: unknown }
>;
+
/**
 * Convert an Astro DB table definition into a drizzle `sqliteTable`.
 * Adds an implicit `_id` integer primary key when no column opts into
 * `primaryKey`, and registers any configured indexes.
 */
export function asDrizzleTable(name: string, table: DBTable) {
	const columns: Record<string, D1ColumnBuilder> = {};
	if (!Object.entries(table.columns).some(([, column]) => hasPrimaryKey(column))) {
		columns['_id'] = integer('_id').primaryKey();
	}
	for (const [columnName, column] of Object.entries(table.columns)) {
		columns[columnName] = columnMapper(columnName, column);
	}
	const drizzleTable = sqliteTable(name, columns, (ormTable) => {
		const indexes: Record<string, IndexBuilder> = {};
		for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
			const onColNames = Array.isArray(indexProps.on) ? indexProps.on : [indexProps.on];
			const onCols = onColNames.map((colName) => ormTable[colName]);
			// Skip indexes that resolve to no known columns; drizzle's `.on()`
			// requires at least one argument.
			if (!atLeastOne(onCols)) continue;

			indexes[indexName] = index(indexName).on(...onCols);
		}
		return indexes;
	});
	return drizzleTable;
}
+
+function atLeastOne<T>(arr: T[]): arr is [T, ...T[]] {
+ return arr.length > 0;
+}
+
/**
 * Map one Astro DB column config to the matching drizzle column builder,
 * applying default, primary key, NOT NULL, and UNIQUE modifiers.
 */
function columnMapper(columnName: string, column: DBColumn) {
	let c: ReturnType<
		| typeof text
		| typeof integer
		| typeof jsonType
		| typeof dateType
		| typeof integer<string, 'boolean'>
	>;

	switch (column.type) {
		case 'text': {
			c = text(columnName);
			// Duplicate default logic across cases to preserve type inference.
			// No clean generic for every column builder.
			if (column.schema.default !== undefined)
				c = c.default(handleSerializedSQL(column.schema.default));
			if (column.schema.primaryKey === true) c = c.primaryKey();
			break;
		}
		case 'number': {
			c = integer(columnName);
			if (column.schema.default !== undefined)
				c = c.default(handleSerializedSQL(column.schema.default));
			if (column.schema.primaryKey === true) c = c.primaryKey();
			break;
		}
		case 'boolean': {
			c = integer(columnName, { mode: 'boolean' });
			if (column.schema.default !== undefined)
				c = c.default(handleSerializedSQL(column.schema.default));
			break;
		}
		case 'json':
			// JSON defaults are serialized by the custom type's `toDriver`.
			c = jsonType(columnName);
			if (column.schema.default !== undefined) c = c.default(column.schema.default);
			break;
		case 'date': {
			c = dateType(columnName);
			if (column.schema.default !== undefined) {
				// A serialized SQL default stays an expression; an ISO string becomes a Date.
				const def = handleSerializedSQL(column.schema.default);
				c = c.default(typeof def === 'string' ? new Date(def) : def);
			}
			break;
		}
	}

	if (!column.schema.optional) c = c.notNull();
	if (column.schema.unique) c = c.unique();
	return c;
}
+
+function handleSerializedSQL<T>(def: T | SerializedSQL) {
+ if (isSerializedSQL(def)) {
+ return sql.raw(def.sql);
+ }
+ return def;
+}
+
+export function normalizeDatabaseUrl(envDbUrl: string | undefined, defaultDbUrl: string): string {
+ if (envDbUrl) {
+ // This could be a file URL, or more likely a root-relative file path.
+ // Convert it to a file URL.
+ if (envDbUrl.startsWith('file://')) {
+ return envDbUrl;
+ }
+
+ return new URL(envDbUrl, pathToFileURL(process.cwd()) + '/').toString();
+ } else {
+ // This is going to be a file URL always,
+ return defaultDbUrl;
+ }
+}
diff --git a/packages/db/src/runtime/types.ts b/packages/db/src/runtime/types.ts
new file mode 100644
index 000000000..08ab16a0c
--- /dev/null
+++ b/packages/db/src/runtime/types.ts
@@ -0,0 +1,109 @@
+import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
+import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
+import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
+
// Column config fields that vary per generated column; the rest of drizzle's
// `ColumnBaseConfig` is fixed by the `Astro*` aliases below.
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<
	ColumnBaseConfig<T, string>,
	'name' | 'tableName' | 'notNull' | 'hasDefault'
>;

// drizzle column type backing `column.text()`.
export type AstroText<T extends GeneratedConfig<'string'>> = SQLiteColumn<
	T & {
		data: string;
		dataType: 'string';
		columnType: 'SQLiteText';
		driverParam: string;
		enumValues: never;
		baseColumn: never;
	}
>;

// drizzle column type backing `column.date()` (stored as ISO text via a custom type).
export type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<
	T & {
		data: Date;
		dataType: 'custom';
		columnType: 'SQLiteCustomColumn';
		driverParam: string;
		enumValues: never;
		baseColumn: never;
	}
>;

// drizzle column type backing `column.boolean()` (stored as an integer).
export type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<
	T & {
		data: boolean;
		dataType: 'boolean';
		columnType: 'SQLiteBoolean';
		driverParam: number;
		enumValues: never;
		baseColumn: never;
	}
>;

// drizzle column type backing `column.number()`.
export type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<
	T & {
		data: number;
		dataType: 'number';
		columnType: 'SQLiteInteger';
		driverParam: number;
		enumValues: never;
		baseColumn: never;
	}
>;

// drizzle column type backing `column.json()` (stored as serialized text).
export type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<
	T & {
		data: unknown;
		dataType: 'custom';
		columnType: 'SQLiteCustomColumn';
		driverParam: string;
		enumValues: never;
		baseColumn: never;
	}
>;

// Map an Astro DB column `type` tag to the matching drizzle column type.
export type Column<T extends DBColumn['type'], S extends GeneratedConfig> = T extends 'boolean'
	? AstroBoolean<S>
	: T extends 'number'
		? AstroNumber<S>
		: T extends 'text'
			? AstroText<S>
			: T extends 'date'
				? AstroDate<S>
				: T extends 'json'
					? AstroJson<S>
					: never;

// Fully-typed drizzle table derived from a table's columns config: each
// column's `hasDefault`/`notNull` flags mirror `default`/`primaryKey`/`optional`.
export type Table<
	TTableName extends string,
	TColumns extends OutputColumnsConfig | ColumnsConfig,
> = SQLiteTableWithColumns<{
	name: TTableName;
	schema: undefined;
	dialect: 'sqlite';
	columns: {
		[K in Extract<keyof TColumns, string>]: Column<
			TColumns[K]['type'],
			{
				tableName: TTableName;
				name: K;
				hasDefault: TColumns[K]['schema'] extends { default: NonNullable<unknown> }
					? true
					: TColumns[K]['schema'] extends { primaryKey: true }
						? true
						: false;
				notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
			}
		>;
	};
}>;
+
+export const SERIALIZED_SQL_KEY = '__serializedSQL';
+export type SerializedSQL = {
+ [SERIALIZED_SQL_KEY]: true;
+ sql: string;
+};
+
+export function isSerializedSQL(value: any): value is SerializedSQL {
+ return typeof value === 'object' && value !== null && SERIALIZED_SQL_KEY in value;
+}
diff --git a/packages/db/src/runtime/utils.ts b/packages/db/src/runtime/utils.ts
new file mode 100644
index 000000000..9a979b062
--- /dev/null
+++ b/packages/db/src/runtime/utils.ts
@@ -0,0 +1,71 @@
+import { LibsqlError } from '@libsql/client';
+import { AstroError } from 'astro/errors';
+
+const isWindows = process?.platform === 'win32';
+
+/**
+ * Small wrapper around fetch that throws an error if the response is not OK. Allows for custom error handling as well through the onNotOK callback.
+ */
+export async function safeFetch(
+ url: Parameters<typeof fetch>[0],
+ options: Parameters<typeof fetch>[1] = {},
+ onNotOK: (response: Response) => void | Promise<void> = () => {
+ throw new Error(`Request to ${url} returned a non-OK status code.`);
+ },
+): Promise<Response> {
+ const response = await fetch(url, options);
+
+ if (!response.ok) {
+ await onNotOK(response);
+ }
+
+ return response;
+}
+
// AstroError subclass so DB failures render with the Astro DB branding.
export class AstroDbError extends AstroError {
	name = 'Astro DB Error';
}

// LibsqlError subclass that carries an optional remediation `hint`
// surfaced alongside the message.
export class DetailedLibsqlError extends LibsqlError {
	name = 'Astro DB Error';
	hint?: string;

	constructor({
		message,
		code,
		hint,
		rawCode,
		cause,
	}: { message: string; code: string; hint?: string; rawCode?: number; cause?: Error }) {
		super(message, code, rawCode, cause);
		this.hint = hint;
	}
}
+
+export function isDbError(err: unknown): err is LibsqlError {
+ return err instanceof LibsqlError || (err instanceof Error && (err as any).libsqlError === true);
+}
+
+function slash(path: string) {
+ const isExtendedLengthPath = path.startsWith('\\\\?\\');
+
+ if (isExtendedLengthPath) {
+ return path;
+ }
+
+ return path.replace(/\\/g, '/');
+}
+
+export function pathToFileURL(path: string): URL {
+ if (isWindows) {
+ let slashed = slash(path);
+ // Windows like C:/foo/bar
+ if (!slashed.startsWith('/')) {
+ slashed = '/' + slashed;
+ }
+ return new URL('file://' + slashed);
+ }
+
+ // Unix is easy
+ return new URL('file://' + path);
+}
diff --git a/packages/db/src/runtime/virtual.ts b/packages/db/src/runtime/virtual.ts
new file mode 100644
index 000000000..5f17823a8
--- /dev/null
+++ b/packages/db/src/runtime/virtual.ts
@@ -0,0 +1,88 @@
+import { sql as _sql } from 'drizzle-orm';
+import type {
+ BooleanColumnInput,
+ ColumnsConfig,
+ DBConfigInput,
+ DateColumnInput,
+ JsonColumnInput,
+ NumberColumnOpts,
+ TableConfig,
+ TextColumnOpts,
+} from '../core/types.js';
+
+function createColumn<S extends string, T extends Record<string, unknown>>(type: S, schema: T) {
+ return {
+ type,
+ /**
+ * @internal
+ */
+ schema,
+ };
+}
+
+export const column = {
+ number: <T extends NumberColumnOpts>(opts: T = {} as T) => {
+ return createColumn('number', opts) satisfies { type: 'number' };
+ },
+ boolean: <T extends BooleanColumnInput['schema']>(opts: T = {} as T) => {
+ return createColumn('boolean', opts) satisfies { type: 'boolean' };
+ },
+ text: <T extends TextColumnOpts>(opts: T = {} as T) => {
+ return createColumn('text', opts) satisfies { type: 'text' };
+ },
+ date<T extends DateColumnInput['schema']>(opts: T = {} as T) {
+ return createColumn('date', opts) satisfies { type: 'date' };
+ },
+ json<T extends JsonColumnInput['schema']>(opts: T = {} as T) {
+ return createColumn('json', opts) satisfies { type: 'json' };
+ },
+};
+
+export function defineTable<TColumns extends ColumnsConfig>(userConfig: TableConfig<TColumns>) {
+ return userConfig;
+}
+
+export function defineDb(userConfig: DBConfigInput) {
+ return userConfig;
+}
+
// Common SQL expressions built with drizzle's `sql` tag, exported for
// convenience (e.g. as column default values — presumably; confirm against
// consumers of these constants).
export const NOW = _sql`CURRENT_TIMESTAMP`;
export const TRUE = _sql`TRUE`;
export const FALSE = _sql`FALSE`;
+
+export {
+ sql,
+ eq,
+ gt,
+ gte,
+ lt,
+ lte,
+ ne,
+ isNull,
+ isNotNull,
+ inArray,
+ notInArray,
+ exists,
+ notExists,
+ between,
+ notBetween,
+ like,
+ notIlike,
+ not,
+ asc,
+ desc,
+ and,
+ or,
+ count,
+ countDistinct,
+ avg,
+ avgDistinct,
+ sum,
+ sumDistinct,
+ max,
+ min,
+} from 'drizzle-orm';
+
+export { alias } from 'drizzle-orm/sqlite-core';
+export { isDbError } from './utils.js';
diff --git a/packages/db/src/utils.ts b/packages/db/src/utils.ts
new file mode 100644
index 000000000..0f244cd00
--- /dev/null
+++ b/packages/db/src/utils.ts
@@ -0,0 +1,14 @@
+export { defineDbIntegration } from './core/utils.js';
+import { tableSchema } from './core/schemas.js';
+import type { ColumnsConfig, TableConfig } from './core/types.js';
+import { type Table, asDrizzleTable as internal_asDrizzleTable } from './runtime/index.js';
+
+export function asDrizzleTable<
+ TableName extends string = string,
+ TColumns extends ColumnsConfig = ColumnsConfig,
+>(name: TableName, tableConfig: TableConfig<TColumns>) {
+ return internal_asDrizzleTable(name, tableSchema.parse(tableConfig)) as Table<
+ TableName,
+ TColumns
+ >;
+}