From a1edc10e7c86cf31733f4b1949d6c24d0603e832 Mon Sep 17 00:00:00 2001 From: AndreyHirsa Date: Tue, 3 Feb 2026 17:11:07 +0300 Subject: [PATCH 1/5] feat(new-compiler): migrate metadata storage from lockfile to LMDB --- .changeset/common-teeth-reply.md | 9 + packages/new-compiler/README.md | 5 +- .../docs/TRANSLATION_ARCHITECTURE.md | 6 +- packages/new-compiler/package.json | 3 +- .../new-compiler/src/metadata/manager.test.ts | 386 ++++++++++++++++++ .../src/metadata/manager.test.worker.ts | 87 ++++ packages/new-compiler/src/metadata/manager.ts | 310 ++++++++------ .../src/plugin/build-translator.ts | 2 +- packages/new-compiler/src/plugin/next.ts | 4 +- .../src/plugin/transform/TESTING.md | 3 +- .../transform/TRANSFORMATION_PIPELINE.md | 89 ++-- packages/new-compiler/src/plugin/unplugin.ts | 5 +- .../src/translation-server/README.md | 2 +- .../translation-server/translation-server.ts | 10 +- pnpm-lock.yaml | 213 ++++++++-- 15 files changed, 901 insertions(+), 233 deletions(-) create mode 100644 .changeset/common-teeth-reply.md create mode 100644 packages/new-compiler/src/metadata/manager.test.ts create mode 100644 packages/new-compiler/src/metadata/manager.test.worker.ts diff --git a/.changeset/common-teeth-reply.md b/.changeset/common-teeth-reply.md new file mode 100644 index 000000000..fec8b4bc6 --- /dev/null +++ b/.changeset/common-teeth-reply.md @@ -0,0 +1,9 @@ +--- +"@lingo.dev/compiler": patch +--- + +- Migrate metadata storage from JSON files to LMDB +- New storage locations: .lingo/metadata-dev/ and .lingo/metadata-build/ +- Update compiler docs +- Remove proper-lockfile dependency +- New tests for MetadataManager diff --git a/packages/new-compiler/README.md b/packages/new-compiler/README.md index 8bd2cc33f..30db74681 100644 --- a/packages/new-compiler/README.md +++ b/packages/new-compiler/README.md @@ -395,9 +395,10 @@ The compiler is organized into several key modules: #### `src/metadata/` - Translation metadata management -- **`manager.ts`** - CRUD operations for `.lingo/metadata.json` -- Thread-safe metadata file operations with file locking +- **`manager.ts`** - CRUD operations for LMDB metadata database +- Uses LMDB for high-performance key-value storage with built-in concurrency - Manages translation entries with hash-based identifiers +- Stores metadata in `.lingo/metadata-dev/` (development) or `.lingo/metadata-build/` (production) #### `src/translators/` - Translation provider abstraction diff --git a/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md b/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md index 4476133bc..f7f78058b 100644 --- a/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md +++ b/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md @@ -7,8 +7,8 @@ metadata management, translation execution, and caching. ## Architectural Principles -1. **Metadata file structure** is only known by: - - Metadata Manager (reads/writes metadata.json) +1. **Metadata storage** is only known by: + - Metadata Manager (reads/writes LMDB database) - Translation Service (orchestrator that coordinates everything) 2. **Translators are stateless** and work with abstract `TranslatableEntry` types @@ -37,7 +37,7 @@ metadata management, translation execution, and caching. 
↓ ┌──────────────────────────────────────────────────┐ │ MetadataManager │ -│ - ONLY component that reads/writes metadata.json│ +│ - ONLY component that reads/writes LMDB database│ │ - Provides metadata loading/saving │ │ - Returns TranslationEntry[] │ └────────────────┬─────────────────────────────────┘ diff --git a/packages/new-compiler/package.json b/packages/new-compiler/package.json index a7b2f2bf0..793909c4f 100644 --- a/packages/new-compiler/package.json +++ b/packages/new-compiler/package.json @@ -143,7 +143,6 @@ "@types/babel__traverse": "7.28.0", "@types/ini": "4.1.1", "@types/node": "25.0.3", - "@types/proper-lockfile": "4.1.4", "@types/react": "19.2.7", "@types/react-dom": "19.2.3", "@types/ws": "8.18.1", @@ -178,7 +177,7 @@ "lodash": "4.17.21", "node-machine-id": "1.1.12", "posthog-node": "5.14.0", - "proper-lockfile": "4.1.2", + "lmdb": "3.2.6", "ws": "8.18.3" }, "peerDependencies": { diff --git a/packages/new-compiler/src/metadata/manager.test.ts b/packages/new-compiler/src/metadata/manager.test.ts new file mode 100644 index 000000000..cc6794061 --- /dev/null +++ b/packages/new-compiler/src/metadata/manager.test.ts @@ -0,0 +1,386 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fs from "fs"; +import path from "path"; +import os from "os"; +import { fork, type ChildProcess } from "child_process"; +import { + MetadataManager, + createEmptyMetadata, + loadMetadata, + cleanupExistingMetadata, + getMetadataPath, +} from "./manager"; +import type { TranslationEntry } from "../types"; + +// Worker helper for multi-process tests +const WORKER_PATH = path.join(__dirname, "manager.test.worker.ts"); + +interface WorkerMessage { + type: "write" | "read" | "write-batch"; + dbPath: string; + workerId?: string; + entries?: Array<{ hash: string; sourceText?: string }>; +} + +interface WorkerResponse { + type: "success" | "error" | "ready"; + workerId?: string; + totalEntries?: number; + entries?: Record; + error?: string; +} + +function spawnWorker(): Promise { + return new Promise((resolve, reject) => { + const worker = fork(WORKER_PATH, [], { + execPath: path.join(__dirname, "../../node_modules/.bin/tsx"), + stdio: ["pipe", "pipe", "pipe", "ipc"], + }); + + const timeout = setTimeout(() => { + worker.kill(); + reject(new Error("Worker spawn timeout")); + }, 5000); + + worker.once("message", (msg: WorkerResponse) => { + clearTimeout(timeout); + if (msg.type === "ready") { + resolve(worker); + } else { + reject(new Error("Worker did not signal ready")); + } + }); + + worker.once("error", (err) => { + clearTimeout(timeout); + reject(err); + }); + }); +} + +function sendToWorker( + worker: ChildProcess, + message: WorkerMessage, +): Promise { + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error("Worker timeout")), 10000); + worker.once("message", (response: WorkerResponse) => { + clearTimeout(timeout); + resolve(response); + }); + worker.send(message); + }); +} + +function createTestEntry( + overrides: Partial & { + hash?: string; + sourceText?: string; + } = {}, +): TranslationEntry { + const hash = overrides.hash ?? `hash_${Math.random().toString(36).slice(2)}`; + return { + type: "content", + hash, + sourceText: overrides.sourceText ?? 
`Source text for ${hash}`, + context: { filePath: "test.tsx", componentName: "TestComponent" }, + location: { filePath: "test.tsx", line: 1, column: 1 }, + ...overrides, + } as TranslationEntry; +} + +function createUniqueDbPath(): string { + return path.join( + os.tmpdir(), + `lmdb-test-${Date.now()}-${Math.random().toString(36).slice(2)}`, + ); +} + +describe("MetadataManager", () => { + let testDbPath: string; + + beforeEach(() => { + testDbPath = createUniqueDbPath(); + }); + + afterEach(async () => { + await cleanupExistingMetadata(testDbPath); + }); + + describe("createEmptyMetadata", () => { + it("should return valid empty metadata structure", () => { + const metadata = createEmptyMetadata(); + + expect(metadata.entries).toEqual({}); + expect(metadata.stats!.totalEntries).toBe(0); + // Verify valid ISO date + const date = new Date(metadata.stats!.lastUpdated); + expect(date.getTime()).not.toBeNaN(); + }); + }); + + describe("loadMetadata", () => { + it("should return empty metadata for new database", () => { + const metadata = loadMetadata(testDbPath); + expect(metadata.entries).toEqual({}); + expect(metadata.stats!.totalEntries).toBe(0); + }); + + it("should load and preserve all entry fields", async () => { + const manager = new MetadataManager(testDbPath); + const entry: TranslationEntry = { + type: "content", + hash: "full-entry", + sourceText: "Hello world", + context: { filePath: "app.tsx", componentName: "AppComponent" }, + location: { filePath: "app.tsx", line: 42, column: 10 }, + }; + + await manager.saveMetadataWithEntries([entry]); + const metadata = loadMetadata(testDbPath); + + expect(metadata.entries["full-entry"]).toEqual(entry); + expect(metadata.stats!.totalEntries).toBe(1); + }); + + it("should handle entries with very long sourceText", async () => { + const manager = new MetadataManager(testDbPath); + const longText = "A".repeat(100000); + await manager.saveMetadataWithEntries([ + createTestEntry({ hash: "long-text", sourceText: longText }), + ]); + + const metadata = manager.loadMetadata(); + expect(metadata.entries["long-text"].sourceText).toBe(longText); + }); + }); + + describe("saveMetadataWithEntries", () => { + it("should save, accumulate, and update entries correctly", async () => { + const manager = new MetadataManager(testDbPath); + + // Save single entry + await manager.saveMetadataWithEntries([ + createTestEntry({ hash: "entry-1", sourceText: "v1" }), + ]); + expect(manager.loadMetadata().stats!.totalEntries).toBe(1); + + // Accumulate multiple entries + await manager.saveMetadataWithEntries([ + createTestEntry({ hash: "entry-2" }), + createTestEntry({ hash: "entry-3" }), + ]); + expect(manager.loadMetadata().stats!.totalEntries).toBe(3); + + // Update existing entry (count should not increase) + const result = await manager.saveMetadataWithEntries([ + createTestEntry({ hash: "entry-1", sourceText: "v2" }), + ]); + expect(result.stats!.totalEntries).toBe(3); + expect(result.entries["entry-1"].sourceText).toBe("v2"); + + // Empty array should not change anything + await manager.saveMetadataWithEntries([]); + expect(manager.loadMetadata().stats!.totalEntries).toBe(3); + }); + + it("should handle large batch of entries", async () => { + const manager = new MetadataManager(testDbPath); + const entries = Array.from({ length: 1000 }, (_, i) => + createTestEntry({ hash: `batch-${i}` }), + ); + + const result = await manager.saveMetadataWithEntries(entries); + expect(result.stats!.totalEntries).toBe(1000); + }); + + it("should maintain data integrity after 
many operations", async () => { + const manager = new MetadataManager(testDbPath); + + // Many saves with overlapping keys + for (let i = 0; i < 50; i++) { + await manager.saveMetadataWithEntries([ + createTestEntry({ hash: `persistent-${i % 10}`, sourceText: `v${i}` }), + createTestEntry({ hash: `unique-${i}` }), + ]); + } + + const final = manager.loadMetadata(); + // 10 persistent + 50 unique = 60 + expect(final.stats!.totalEntries).toBe(60); + + // Verify save result matches load result + const saveResult = await manager.saveMetadataWithEntries([]); + expect(saveResult.stats!.totalEntries).toBe(final.stats!.totalEntries); + }); + }); + + describe("concurrent access (single process)", () => { + it("should handle concurrent operations from multiple manager instances", async () => { + const manager1 = new MetadataManager(testDbPath); + const manager2 = new MetadataManager(testDbPath); + + // Concurrent writes + const promises = Array.from({ length: 20 }, (_, i) => + (i % 2 === 0 ? manager1 : manager2).saveMetadataWithEntries([ + createTestEntry({ hash: `concurrent-${i}` }), + ]), + ); + await Promise.all(promises); + + // Both managers should see all entries + expect(manager1.loadMetadata().stats!.totalEntries).toBe(20); + expect(manager2.loadMetadata().stats!.totalEntries).toBe(20); + }); + }); + + describe("cleanupExistingMetadata", () => { + it("should remove database and allow reopening with fresh state", async () => { + const manager1 = new MetadataManager(testDbPath); + await manager1.saveMetadataWithEntries([createTestEntry({ hash: "before" })]); + expect(fs.existsSync(testDbPath)).toBe(true); + + await cleanupExistingMetadata(testDbPath); + expect(fs.existsSync(testDbPath)).toBe(false); + + // Should work with fresh state after cleanup + const manager2 = new MetadataManager(testDbPath); + const metadata = manager2.loadMetadata(); + expect(metadata.entries["before"]).toBeUndefined(); + expect(metadata.stats!.totalEntries).toBe(0); + }); + + it("should handle non-existent path and multiple calls gracefully", () => { + const nonExistent = path.join(os.tmpdir(), "does-not-exist-db"); + expect(() => cleanupExistingMetadata(nonExistent)).not.toThrow(); + expect(() => cleanupExistingMetadata(nonExistent)).not.toThrow(); + }); + }); + + describe("getMetadataPath", () => { + it("should return correct path based on environment and config", () => { + const devResult = getMetadataPath({ + sourceRoot: "/app", + lingoDir: ".lingo", + environment: "development", + }); + expect(devResult).toContain("metadata-dev"); + expect(devResult).not.toContain("metadata-build"); + + const prodResult = getMetadataPath({ + sourceRoot: "/app", + lingoDir: ".lingo", + environment: "production", + }); + expect(prodResult).toContain("metadata-build"); + + const customResult = getMetadataPath({ + sourceRoot: "/app", + lingoDir: ".custom-lingo", + environment: "development", + }); + expect(customResult).toContain(".custom-lingo"); + }); + }); + + describe("singleton database connection", () => { + it("should close previous db when switching paths", async () => { + const path1 = createUniqueDbPath(); + const path2 = createUniqueDbPath(); + + try { + const manager1 = new MetadataManager(path1); + await manager1.saveMetadataWithEntries([createTestEntry({ hash: "in-path1" })]); + + const manager2 = new MetadataManager(path2); + await manager2.saveMetadataWithEntries([createTestEntry({ hash: "in-path2" })]); + + // Each database has its own data + const result2 = manager2.loadMetadata(); + 
expect(result2.entries["in-path2"]).toBeDefined(); + expect(result2.entries["in-path1"]).toBeUndefined(); + } finally { + await cleanupExistingMetadata(path1); + await cleanupExistingMetadata(path2); + } + }); + }); + + describe("error handling", () => { + it("should throw descriptive error for invalid path", async () => { + const invalidPath = "/root/definitely/cannot/create/this/path"; + await expect(async () => new MetadataManager(invalidPath)).rejects.toThrow(); + }); + }); + + describe("multi-process scenarios (Next.js-like)", () => { + // These tests spawn actual child processes to simulate Next.js workers. + // Each process has its own singleton DatabaseConnection. + // LMDB handles cross-process concurrency via OS-level locking (MVCC). + + it("should share data between separate processes", async () => { + const worker1 = await spawnWorker(); + const worker2 = await spawnWorker(); + + try { + await sendToWorker(worker1, { + type: "write", + dbPath: testDbPath, + entries: [{ hash: "from-process-1" }], + }); + + await sendToWorker(worker2, { + type: "write", + dbPath: testDbPath, + entries: [{ hash: "from-process-2" }], + }); + + const read = await sendToWorker(worker1, { type: "read", dbPath: testDbPath }); + expect(read.totalEntries).toBe(2); + expect(read.entries?.["from-process-1"]).toBeDefined(); + expect(read.entries?.["from-process-2"]).toBeDefined(); + } finally { + worker1.kill(); + worker2.kill(); + } + }); + + it("should simulate Next.js build: cleanup then multi-worker writes", async () => { + // Pre-populate with "old" data + const setup = new MetadataManager(testDbPath); + await setup.saveMetadataWithEntries([ + createTestEntry({ hash: "old-entry-1" }), + createTestEntry({ hash: "old-entry-2" }), + ]); + + // Main runner cleans up (simulates Next.js build start) + await cleanupExistingMetadata(testDbPath); + + // Spawn workers (simulates Next.js build workers) + const workers = await Promise.all([spawnWorker(), spawnWorker(), spawnWorker()]); + + try { + // Workers write concurrently + await Promise.all( + workers.map((worker, i) => + sendToWorker(worker, { + type: "write-batch", + dbPath: testDbPath, + entries: Array.from({ length: 5 }, (_, j) => ({ + hash: `worker${i}-file${j}`, + })), + }), + ), + ); + + // Main process reads final metadata + const finalMetadata = loadMetadata(testDbPath); + expect(finalMetadata.stats!.totalEntries).toBe(15); + expect(finalMetadata.entries["old-entry-1"]).toBeUndefined(); + expect(finalMetadata.entries["worker0-file0"]).toBeDefined(); + } finally { + workers.forEach((w) => w.kill()); + } + }); + }); +}); diff --git a/packages/new-compiler/src/metadata/manager.test.worker.ts b/packages/new-compiler/src/metadata/manager.test.worker.ts new file mode 100644 index 000000000..5978e7943 --- /dev/null +++ b/packages/new-compiler/src/metadata/manager.test.worker.ts @@ -0,0 +1,87 @@ +/** + * Worker script for multi-process LMDB tests. + * This file is forked by manager.test.ts to simulate Next.js workers. + * + * Each forked process has its own singleton DatabaseConnection, + * testing true cross-process LMDB behavior. 
+ */ +import { MetadataManager, loadMetadata } from "./manager"; +import type { TranslationEntry } from "../types"; + +interface WorkerMessage { + type: "write" | "read" | "write-batch"; + dbPath: string; + workerId?: string; + entries?: Array<{ hash: string; sourceText?: string }>; +} + +interface WorkerResponse { + type: "success" | "error"; + workerId?: string; + totalEntries?: number; + entries?: Record; + error?: string; +} + +function createEntry(hash: string, sourceText?: string): TranslationEntry { + return { + type: "content", + hash, + sourceText: sourceText ?? `Source text for ${hash}`, + context: { + filePath: "test.tsx", + componentName: "TestComponent", + }, + location: { + filePath: "test.tsx", + line: 1, + column: 1, + }, + } as TranslationEntry; +} + +async function handleMessage(message: WorkerMessage): Promise { + try { + const manager = new MetadataManager(message.dbPath); + + if (message.type === "write" || message.type === "write-batch") { + const entries = (message.entries ?? []).map((e) => + createEntry(e.hash, e.sourceText), + ); + await manager.saveMetadataWithEntries(entries); + const metadata = manager.loadMetadata(); + return { + type: "success", + workerId: message.workerId, + totalEntries: metadata.stats?.totalEntries ?? 0, + }; + } + + if (message.type === "read") { + const metadata = loadMetadata(message.dbPath); + return { + type: "success", + workerId: message.workerId, + totalEntries: metadata.stats?.totalEntries ?? 0, + entries: metadata.entries, + }; + } + + return { type: "error", error: "Unknown message type" }; + } catch (error) { + return { + type: "error", + workerId: message.workerId, + error: error instanceof Error ? error.message : String(error), + }; + } +} + +// Handle messages from parent process +process.on("message", async (message: WorkerMessage) => { + const response = await handleMessage(message); + process.send!(response); +}); + +// Signal ready +process.send!({ type: "ready" }); diff --git a/packages/new-compiler/src/metadata/manager.ts b/packages/new-compiler/src/metadata/manager.ts index 216ab7e56..489e826ab 100644 --- a/packages/new-compiler/src/metadata/manager.ts +++ b/packages/new-compiler/src/metadata/manager.ts @@ -1,12 +1,83 @@ -import fsPromises from "fs/promises"; import fs from "fs"; import path from "path"; -import lockfile from "proper-lockfile"; +import { open, type RootDatabase } from "lmdb"; import type { MetadataSchema, PathConfig, TranslationEntry } from "../types"; -import { DEFAULT_TIMEOUTS, withTimeout } from "../utils/timeout"; import { getLingoDir } from "../utils/path-helpers"; import { logger } from "../utils/logger"; +// Special key for storing stats +const STATS_KEY = "__stats__"; + +// Metadata directory names for each environment +const METADATA_DIR_DEV = "metadata-dev"; +const METADATA_DIR_BUILD = "metadata-build"; + +/** + * Singleton class managing the LMDB database connection. + * Only one database is ever open at a time (either dev or build). + * + * Note: In multi-process bundler environments (e.g., Webpack workers), + * each process has its own singleton instance. LMDB handles concurrent + * access across processes safely via OS-level locking (MVCC). 
+ */ +class DatabaseConnection { + private static instance: { db: RootDatabase; path: string } | null = null; + + static get(dbPath: string): RootDatabase { + // Return existing db if same path + if (this.instance?.path === dbPath) { + return this.instance.db; + } + + // Close previous db if different path + if (this.instance) { + try { + this.instance.db.close(); + } catch (e) { + logger.debug(`Error closing previous database: ${e}`); + } + this.instance = null; + } + + fs.mkdirSync(dbPath, { recursive: true }); + + // Build mode: disable fsync - metadata is deleted immediately after build, + // so durability is not needed and this avoids delay on close. + // Dev mode: keep sync enabled for consistency during long-running sessions. + const isBuildMode = dbPath.endsWith(METADATA_DIR_BUILD); + + try { + const db = open({ + path: dbPath, + compression: true, + noSync: isBuildMode, + }); + + this.instance = { db, path: dbPath }; + return db; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new Error( + `Failed to open LMDB metadata database at ${dbPath}. ` + + `This may be caused by disk space issues, permission problems, or database corruption. ` + + `Try deleting the ${dbPath} directory and rebuilding. ` + + `Original error: ${message}`, + ); + } + } + + static close(dbPath: string): void { + if (this.instance?.path === dbPath) { + try { + this.instance.db.close(); + } catch (e) { + logger.debug(`Error closing database at ${dbPath}: ${e}`); + } + this.instance = null; + } + } +} + export function createEmptyMetadata(): MetadataSchema { return { entries: {}, @@ -17,134 +88,116 @@ export function createEmptyMetadata(): MetadataSchema { }; } -export function loadMetadata(path: string) { - return new MetadataManager(path).loadMetadata(); +export function loadMetadata(dbPath: string): MetadataSchema { + return new MetadataManager(dbPath).loadMetadata(); } -export function cleanupExistingMetadata(metadataFilePath: string) { - // General cleanup. Delete metadata and stop the server if any was started. - logger.debug(`Attempting to cleanup metadata file: ${metadataFilePath}`); - - try { - fs.unlinkSync(metadataFilePath); - logger.info(`🧹 Cleaned up build metadata file: ${metadataFilePath}`); - } catch (error: any) { - // Ignore if file doesn't exist - if (error.code === "ENOENT") { - logger.debug( - `Metadata file already deleted or doesn't exist: ${metadataFilePath}`, - ); - } else { - logger.warn(`Failed to cleanup metadata file: ${error.message}`); +/** + * Synchronous sleep using Atomics.wait(). + * Blocks the thread without consuming CPU cycles. + */ +function sleepSync(ms: number): void { + const sharedBuffer = new SharedArrayBuffer(4); + const int32 = new Int32Array(sharedBuffer); + Atomics.wait(int32, 0, 0, ms); +} + +/** + * Clean up the metadata database directory. + * Synchronous to work with process exit handlers. + * Uses Atomics.wait() for non-busy-wait blocking during retries on Windows. 
+ */ +export function cleanupExistingMetadata(metadataDbPath: string): void { + logger.debug(`Attempting to cleanup metadata database: ${metadataDbPath}`); + + // Close database if open to release locks + DatabaseConnection.close(metadataDbPath); + + // Retry a few times - on Windows, memory-mapped files may be held briefly by other processes + const maxRetries = 5; + + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + fs.rmSync(metadataDbPath, { recursive: true, force: true }); + logger.info(`🧹 Cleaned up metadata database: ${metadataDbPath}`); + return; + } catch (error) { + const code = + error instanceof Error && "code" in error + ? (error as NodeJS.ErrnoException).code + : undefined; + const message = error instanceof Error ? error.message : String(error); + + if (code === "ENOENT") { + // Ignore if file doesn't exist + logger.debug( + `Metadata database already deleted or doesn't exist: ${metadataDbPath}`, + ); + return; + } + + const isRetryable = code === "EBUSY" || code === "EPERM"; + if (isRetryable && attempt < maxRetries) { + logger.debug(`Cleanup attempt ${attempt} failed, retrying...`); + sleepSync(200); + continue; + } + + logger.warn(`Failed to cleanup metadata database: ${message}`); } } } /** - * Get the absolute path to the metadata file + * Get the absolute path to the metadata database directory * * @param config - Config with sourceRoot, lingoDir, and environment - * @returns Absolute path to metadata file + * @returns Absolute path to metadata database directory */ export function getMetadataPath(config: PathConfig): string { - const filename = - // Similar to next keeping dev build separate, let's keep the build metadata clean of any dev mode additions + const dirname = config.environment === "development" - ? "metadata-dev.json" - : "metadata-build.json"; - return path.join(getLingoDir(config), filename); + ? METADATA_DIR_DEV + : METADATA_DIR_BUILD; + return path.join(getLingoDir(config), dirname); } export class MetadataManager { - constructor(private readonly filePath: string) {} + private db: RootDatabase; - /** - * Load metadata from disk - * Creates empty metadata if file doesn't exist - * Times out after 15 seconds to prevent indefinite hangs - */ - async loadMetadata(): Promise { - try { - const content = await withTimeout( - fsPromises.readFile(this.filePath, "utf-8"), - DEFAULT_TIMEOUTS.METADATA, - "Load metadata", - ); - return JSON.parse(content) as MetadataSchema; - } catch (error: any) { - if (error.code === "ENOENT") { - // File doesn't exist, create new metadata - return createEmptyMetadata(); - } - throw error; - } + constructor(dbPath: string) { + this.db = DatabaseConnection.get(dbPath); } /** - * Save metadata to disk - * Times out after 15 seconds to prevent indefinite hangs + * Load metadata from LMDB database. + * Returns empty metadata if database is empty. */ - private async saveMetadata(metadata: MetadataSchema): Promise { - await withTimeout( - fsPromises.mkdir(path.dirname(this.filePath), { recursive: true }), - DEFAULT_TIMEOUTS.FILE_IO, - "Create metadata directory", - ); - - metadata.stats = { - totalEntries: Object.keys(metadata.entries).length, - lastUpdated: new Date().toISOString(), - }; - - // Per LLM writing to a file is not an atomic operation while rename is, so nobody should get partial content. - // Sounds reasonable. 
- const dir = path.dirname(this.filePath); - const base = path.basename(this.filePath); - - // Keep temp file in the same directory to maximize chance that rename is atomic - const tmpPath = path.join(dir, `.${base}.tmp-${process.pid}-${Date.now()}`); - - const json = JSON.stringify(metadata, null, 2); + loadMetadata(): MetadataSchema { + const entries: Record = {}; - await withTimeout( - fsPromises.writeFile(tmpPath, json, "utf-8"), - DEFAULT_TIMEOUTS.METADATA, - "Save metadata (tmp write)", - ); - - try { - // TODO (AleksandrSl 14/12/2025): LLM says that we may want to remove older file first for windows, but it seems lo work fine as is. - await withTimeout( - fsPromises.rename(tmpPath, this.filePath), - DEFAULT_TIMEOUTS.METADATA, - "Save metadata (atomic rename)", - ); - } catch (error) { - // On Windows, rename() can fail with EPERM if something briefly holds the file. - // As a fallback, try writing directly to the destination (not atomic). - if ( - error && - typeof error === "object" && - "code" in error && - error.code === "EPERM" - ) { - await withTimeout( - fsPromises.writeFile(this.filePath, json, "utf-8"), - DEFAULT_TIMEOUTS.METADATA, - "Save metadata (EPERM fallback direct write)", - ); - return; + for (const { key, value } of this.db.getRange()) { + const keyStr = key as string; + if (keyStr !== STATS_KEY) { + entries[keyStr] = value as TranslationEntry; } - throw error; - } finally { - // Best-effort cleanup if rename failed for some reason - await fsPromises.unlink(tmpPath).catch(() => {}); } + + const stats = this.db.get(STATS_KEY) as MetadataSchema["stats"] | undefined; + if (Object.keys(entries).length === 0 && !stats) { + return createEmptyMetadata(); + } + return { + entries, + stats: stats || { + totalEntries: Object.keys(entries).length, + lastUpdated: new Date().toISOString(), + }, + }; } /** - * Thread-safe save operation that atomically updates metadata with new entries - * Uses file locking to prevent concurrent write corruption + * Save entries to database - LMDB handles concurrency via MVCC. 
* * @param entries - Translation entries to add/update * @returns The updated metadata schema @@ -152,39 +205,26 @@ export class MetadataManager { async saveMetadataWithEntries( entries: TranslationEntry[], ): Promise { - const lockDir = path.dirname(this.filePath); - - await fsPromises.mkdir(lockDir, { recursive: true }); + await this.db.transaction(() => { + for (const entry of entries) { + this.db.put(entry.hash, entry); + } - try { - await fsPromises.access(this.filePath); - } catch { - await fsPromises.writeFile( - this.filePath, - JSON.stringify(createEmptyMetadata(), null, 2), - "utf-8", - ); - } + // Count entries explicitly (excluding stats key) for clarity + let entryCount = 0; + for (const { key } of this.db.getRange()) { + if (key !== STATS_KEY) { + entryCount++; + } + } - const release = await lockfile.lock(this.filePath, { - retries: { - retries: 20, - minTimeout: 50, - maxTimeout: 2000, - }, - stale: 5000, + const stats = { + totalEntries: entryCount, + lastUpdated: new Date().toISOString(), + }; + this.db.put(STATS_KEY, stats); }); - try { - // Re-load metadata inside lock to get latest state - const currentMetadata = await this.loadMetadata(); - for (const entry of entries) { - currentMetadata.entries[entry.hash] = entry; - } - await this.saveMetadata(currentMetadata); - return currentMetadata; - } finally { - await release(); - } + return this.loadMetadata(); } } diff --git a/packages/new-compiler/src/plugin/build-translator.ts b/packages/new-compiler/src/plugin/build-translator.ts index 75a7ef88d..7a04adb2b 100644 --- a/packages/new-compiler/src/plugin/build-translator.ts +++ b/packages/new-compiler/src/plugin/build-translator.ts @@ -64,7 +64,7 @@ export async function processBuildTranslations( logger.info(`🌍 Build mode: ${buildMode}`); - const metadata = await loadMetadata(metadataFilePath); + const metadata = loadMetadata(metadataFilePath); if (!metadata || Object.keys(metadata.entries).length === 0) { logger.info("No translations to process (metadata is empty)"); diff --git a/packages/new-compiler/src/plugin/next.ts b/packages/new-compiler/src/plugin/next.ts index a92545694..c9dadae5c 100644 --- a/packages/new-compiler/src/plugin/next.ts +++ b/packages/new-compiler/src/plugin/next.ts @@ -238,9 +238,7 @@ export async function withLingo( cleanupExistingMetadata(metadataFilePath); registerCleanupOnCurrentProcess({ - cleanup: () => { - cleanupExistingMetadata(metadataFilePath); - }, + cleanup: () => cleanupExistingMetadata(metadataFilePath), }); } diff --git a/packages/new-compiler/src/plugin/transform/TESTING.md b/packages/new-compiler/src/plugin/transform/TESTING.md index fd03927da..f48d77fa5 100644 --- a/packages/new-compiler/src/plugin/transform/TESTING.md +++ b/packages/new-compiler/src/plugin/transform/TESTING.md @@ -66,5 +66,6 @@ After running tests, verify no files were created: ```bash # Should return 0 find compiler/src -name ".lingo" -type d | wc -l -find compiler/src -name "metadata.json" | wc -l +find compiler/src -name "metadata-dev" -type d | wc -l +find compiler/src -name "metadata-build" -type d | wc -l ``` diff --git a/packages/new-compiler/src/plugin/transform/TRANSFORMATION_PIPELINE.md b/packages/new-compiler/src/plugin/transform/TRANSFORMATION_PIPELINE.md index b6ec00798..6f9c3b225 100644 --- a/packages/new-compiler/src/plugin/transform/TRANSFORMATION_PIPELINE.md +++ b/packages/new-compiler/src/plugin/transform/TRANSFORMATION_PIPELINE.md @@ -9,7 +9,7 @@ Source JSX → Babel Parser → AST Transformation → Code Generation → Trans ↓ Metadata Extraction ↓ - 
.lingo/metadata.json + .lingo/metadata-{env}/ (LMDB database) ``` ## Pipeline Stages @@ -111,12 +111,14 @@ export function Welcome() { // AFTER (Server Component) import { getServerTranslations } from "@lingo.dev/compiler/react/server"; -import __lingoMetadata from "./.lingo/metadata.json"; export async function Welcome() { - const t = await getServerTranslations({ - metadata: __lingoMetadata, - sourceLocale: "en", + // getServerTranslations options: + // - hashes: string[] - Translation hashes needed (injected at build time) + // - locale?: LocaleCode - Target locale (auto-detected if not provided) + // - basePath?: string - Base path for translation files (default: cwd) + const { t } = await getServerTranslations({ + hashes: ["a1b2c3d4e5f6"], }); return
    <div>{t("a1b2c3d4e5f6", "Hello World")}</div>
; } @@ -155,38 +157,50 @@ export async function Welcome() { **Purpose**: Track all translatable strings across the application -**Metadata Structure**: +**Storage**: LMDB key-value database in `.lingo/metadata-dev/` or `.lingo/metadata-build/` -```json +**Why LMDB?** +- ~1M ops/sec write speed vs ~50K for SQLite +- Zero-copy reads from memory-mapped files +- Built-in LZ4 compression +- Simple key-value API without SQL overhead + +**Data Structure**: + +Each translation entry is stored with its hash as the key: + +```typescript +// Key: "a1b2c3d4e5f6" +// Value: { - "version": "0.1", - "entries": { - "a1b2c3d4e5f6": { - "sourceText": "Hello World", - "context": { - "componentName": "Welcome", - "filePath": "components/Welcome.tsx", - "line": 3, - "column": 10 - }, - "hash": "a1b2c3d4e5f6", - "addedAt": "2025-01-20T10:00:00.000Z" - } + sourceText: "Hello World", + context: { + componentName: "Welcome", + filePath: "components/Welcome.tsx", + line: 3, + column: 10 }, - "stats": { - "totalEntries": 1, - "lastUpdated": "2025-01-20T10:00:00.000Z" - } + hash: "a1b2c3d4e5f6", + addedAt: "2025-01-20T10:00:00.000Z" +} +``` + +Stats are stored under a special `__stats__` key: + +```typescript +{ + totalEntries: 1, + lastUpdated: "2025-01-20T10:00:00.000Z" } ``` **Operations**: -- `loadMetadata()`: Read existing metadata -- `upsertEntries()`: Add or update translation entries -- `saveMetadata()`: Write metadata to disk +- `loadMetadata()`: Read all entries from LMDB database +- `saveMetadataWithEntries()`: Atomically write entries using LMDB transactions +- `cleanupExistingMetadata()`: Remove the database directory -**Storage**: `{sourceRoot}/.lingo/metadata.json` +**Concurrency**: LMDB handles concurrent access internally (multi-reader, single-writer) --- @@ -241,16 +255,17 @@ After transformation, the runtime provides the actual translation functionality: ### Server Components ```tsx -const t = await getServerTranslations({ - metadata: __lingoMetadata, - sourceLocale: "en", +const { t, locale, translations } = await getServerTranslations({ + hashes: ["hash1", "hash2"], // Injected at build time + locale: "es", // Optional: auto-detected if omitted + basePath: process.cwd(), // Optional: defaults to cwd }); ``` **How it works**: -1. Reads metadata to know what translations are needed -2. Loads translation file for current locale from `.lingo/{locale}.json` +1. Fetches translations for the specified hashes from `.lingo/cache/{locale}.json` +2. Auto-detects locale via configured locale resolver if not provided 3. Returns `t()` function that maps hashes to translated strings 4. Falls back to source text if translation missing @@ -299,7 +314,7 @@ Request locale → Check cache → Generate if missing → Return translations 4. **Fallback Safety**: Transformed code includes original text as fallback, so missing translations don't break the app -5. **Metadata-Driven**: Single source of truth (`metadata.json`) tracks all translatable content +5. **Metadata-Driven**: Single source of truth (LMDB database) tracks all translatable content 6. **Universal Compatibility**: Same transformation logic works across Vite, Webpack, Rollup, esbuild, and Next.js @@ -328,12 +343,10 @@ export function Greeting({ name }) { // 4. 
TRANSFORMED OUTPUT import { getServerTranslations } from "@lingo.dev/compiler/react/server"; -import __lingoMetadata from "./.lingo/metadata.json"; export async function Greeting({ name }) { - const t = await getServerTranslations({ - metadata: __lingoMetadata, - sourceLocale: "en" + const { t } = await getServerTranslations({ + hashes: ["a1b2c3d4e5f6"], }); return

      <div>{t("a1b2c3d4e5f6", "Hello, ")}{name}!</div>

; } diff --git a/packages/new-compiler/src/plugin/unplugin.ts b/packages/new-compiler/src/plugin/unplugin.ts index 629fccd4d..a0f29f68b 100644 --- a/packages/new-compiler/src/plugin/unplugin.ts +++ b/packages/new-compiler/src/plugin/unplugin.ts @@ -381,7 +381,10 @@ export const lingoUnplugin = createUnplugin< trackEvent(TRACKING_EVENTS.BUILD_ERROR, { framework: currentFramework, errorType: "transform", - errorMessage: error instanceof Error ? error.message : "Unknown transform error", + errorMessage: + error instanceof Error + ? error.message + : "Unknown transform error", filePath: id, environment: config.environment, }); diff --git a/packages/new-compiler/src/translation-server/README.md b/packages/new-compiler/src/translation-server/README.md index 13fbae27c..9f7c33757 100644 --- a/packages/new-compiler/src/translation-server/README.md +++ b/packages/new-compiler/src/translation-server/README.md @@ -309,7 +309,7 @@ See commit history and `utils/timeout.ts` for implementation. 1. Start your dev server 2. Files get transformed by the bundler -3. Metadata is generated in `.lingo/metadata.json` +3. Metadata is stored in `.lingo/metadata-dev/` (LMDB database) ## Migration Guide diff --git a/packages/new-compiler/src/translation-server/translation-server.ts b/packages/new-compiler/src/translation-server/translation-server.ts index 7c9695cd6..10db2a873 100644 --- a/packages/new-compiler/src/translation-server/translation-server.ts +++ b/packages/new-compiler/src/translation-server/translation-server.ts @@ -313,9 +313,9 @@ export class TranslationServer { * Reload metadata from disk * Useful when metadata has been updated during runtime (e.g., new transformations) */ - async reloadMetadata(): Promise { + reloadMetadata(): void { try { - this.metadata = await loadMetadata(getMetadataPath(this.config)); + this.metadata = loadMetadata(getMetadataPath(this.config)); this.logger.debug( `Reloaded metadata: ${Object.keys(this.metadata.entries).length} entries`, ); @@ -344,7 +344,7 @@ export class TranslationServer { // Always reload metadata to get the latest entries // This is critical for build-time translation where metadata is updated // continuously as files are transformed - await this.reloadMetadata(); + this.reloadMetadata(); if (!this.metadata) { throw new Error("Failed to load metadata"); @@ -651,7 +651,7 @@ export class TranslationServer { } // Reload metadata to ensure we have the latest entries // (new entries may have been added since server started) - await this.reloadMetadata(); + this.reloadMetadata(); if (!this.metadata) { throw new Error("Failed to load metadata"); @@ -713,7 +713,7 @@ export class TranslationServer { // Reload metadata to ensure we have the latest entries // (new entries may have been added since server started) - await this.reloadMetadata(); + this.reloadMetadata(); if (!this.metadata) { throw new Error("Failed to load metadata"); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 52d3d6f81..e91bb0928 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -778,6 +778,9 @@ importers: lingo.dev: specifier: workspace:^ version: link:../cli + lmdb: + specifier: 3.2.6 + version: 3.2.6 lodash: specifier: 4.17.21 version: 4.17.21 @@ -787,9 +790,6 @@ importers: posthog-node: specifier: 5.14.0 version: 5.14.0 - proper-lockfile: - specifier: 4.1.2 - version: 4.1.2 react: specifier: ^19.0.0 version: 19.2.3 @@ -818,9 +818,6 @@ importers: '@types/node': specifier: 25.0.3 version: 25.0.3 - '@types/proper-lockfile': - specifier: 4.1.4 - version: 4.1.4 '@types/react': specifier: 
19.2.7 version: 19.2.7 @@ -2538,6 +2535,36 @@ packages: '@lingo.dev/_spec@0.44.4': resolution: {integrity: sha512-SJifegtTD+VF0WUZtZVQytCX/YdOSLqCt9WTc0EXIx2t8arPnIgE2+rmPCJ84GYQhFc5BaTbYEo2pQhKQCOMNg==} + '@lmdb/lmdb-darwin-arm64@3.2.6': + resolution: {integrity: sha512-yF/ih9EJJZc72psFQbwnn8mExIWfTnzWJg+N02hnpXtDPETYLmQswIMBn7+V88lfCaFrMozJsUvcEQIkEPU0Gg==} + cpu: [arm64] + os: [darwin] + + '@lmdb/lmdb-darwin-x64@3.2.6': + resolution: {integrity: sha512-5BbCumsFLbCi586Bb1lTWQFkekdQUw8/t8cy++Uq251cl3hbDIGEwD9HAwh8H6IS2F6QA9KdKmO136LmipRNkg==} + cpu: [x64] + os: [darwin] + + '@lmdb/lmdb-linux-arm64@3.2.6': + resolution: {integrity: sha512-l5VmJamJ3nyMmeD1ANBQCQqy7do1ESaJQfKPSm2IG9/ADZryptTyCj8N6QaYgIWewqNUrcbdMkJajRQAt5Qjfg==} + cpu: [arm64] + os: [linux] + + '@lmdb/lmdb-linux-arm@3.2.6': + resolution: {integrity: sha512-+6XgLpMb7HBoWxXj+bLbiiB4s0mRRcDPElnRS3LpWRzdYSe+gFk5MT/4RrVNqd2MESUDmb53NUXw1+BP69bjiQ==} + cpu: [arm] + os: [linux] + + '@lmdb/lmdb-linux-x64@3.2.6': + resolution: {integrity: sha512-nDYT8qN9si5+onHYYaI4DiauDMx24OAiuZAUsEqrDy+ja/3EbpXPX/VAkMV8AEaQhy3xc4dRC+KcYIvOFefJ4Q==} + cpu: [x64] + os: [linux] + + '@lmdb/lmdb-win32-x64@3.2.6': + resolution: {integrity: sha512-XlqVtILonQnG+9fH2N3Aytria7P/1fwDgDhl29rde96uH2sLB8CHORIf2PfuLVzFQJ7Uqp8py9AYwr3ZUCFfWg==} + cpu: [x64] + os: [win32] + '@manypkg/find-root@1.1.0': resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} @@ -2565,6 +2592,36 @@ packages: '@cfworker/json-schema': optional: true + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==} + cpu: [arm64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + resolution: {integrity: sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==} + cpu: [x64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + resolution: {integrity: sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==} + cpu: [arm64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + resolution: {integrity: sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==} + cpu: [arm] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + resolution: {integrity: sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==} + cpu: [x64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + resolution: {integrity: sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==} + cpu: [x64] + os: [win32] + '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} @@ -4207,9 +4264,6 @@ packages: '@types/plist@3.0.5': resolution: {integrity: sha512-E6OCaRmAe4WDmWNsL/9RMqdkkzDCY1etutkflWk4c+AcjDU07Pcz1fQwTX0TQz+Pxqn9i4L1TU3UFpjnrcDgxA==} - '@types/proper-lockfile@4.1.4': - resolution: {integrity: sha512-uo2ABllncSqg9F1D4nugVl9v93RmjxF6LJzQLMLDdPaXCUIDPeOJ21Gbqi43xNKzBi/WQ0Q0dICqufzQbMjipQ==} - '@types/qs@6.14.0': resolution: {integrity: sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==} @@ -4238,9 +4292,6 @@ packages: '@types/resolve@1.20.2': resolution: {integrity: 
sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==} - '@types/retry@0.12.5': - resolution: {integrity: sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==} - '@types/send@0.17.6': resolution: {integrity: sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og==} @@ -6956,6 +7007,10 @@ packages: resolution: {integrity: sha512-vsBzcU4oE+v0lj4FhVLzr9dBTv4/fHIa57l+GCwovP8MoFNZJTOhGU8PXd4v2VJCbECAaijBiHntiekFMLvo0g==} engines: {node: '>=18.0.0'} + lmdb@3.2.6: + resolution: {integrity: sha512-SuHqzPl7mYStna8WRotY8XX/EUZBjjv3QyKIByeCLFfC9uXT/OIHByEcA07PzbMfQAM0KYJtLgtpMRlIe5dErQ==} + hasBin: true + load-tsconfig@0.2.5: resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -7343,6 +7398,13 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + msgpackr-extract@3.0.3: + resolution: {integrity: sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==} + hasBin: true + + msgpackr@1.11.8: + resolution: {integrity: sha512-bC4UGzHhVvgDNS7kn9tV8fAucIYUBuGojcaLiz7v+P63Lmtm0Xeji8B/8tYKddALXxJLpwIeBmUN3u64C4YkRA==} + mute-stream@2.0.0: resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} engines: {node: ^18.17.0 || >=20.5.0} @@ -7461,6 +7523,9 @@ packages: sass: optional: true + node-addon-api@6.1.0: + resolution: {integrity: sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==} + node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} engines: {node: 4.x || >=6.0.0} @@ -7470,6 +7535,10 @@ packages: encoding: optional: true + node-gyp-build-optional-packages@5.2.2: + resolution: {integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==} + hasBin: true + node-machine-id@1.1.12: resolution: {integrity: sha512-QNABxbrPa3qEIfrE6GOJ7BYIuignnJw7iQ2YPbc3Nla1HzRJjXzZOiikfF8m7eAMfichLt3M4VgLOetqgDmgGQ==} @@ -7596,6 +7665,9 @@ packages: resolution: {integrity: sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==} engines: {node: '>=18'} + ordered-binary@1.6.1: + resolution: {integrity: sha512-QkCdPooczexPLiXIrbVOPYkR3VO3T6v2OyKRkR1Xbhpy7/LAVXwahnRCgRp78Oe/Ehf0C/HATAxfSr6eA1oX+w==} + os-tmpdir@1.0.2: resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} engines: {node: '>=0.10.0'} @@ -8111,9 +8183,6 @@ packages: prop-types@15.8.1: resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} - proper-lockfile@4.1.2: - resolution: {integrity: sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==} - property-information@7.1.0: resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} @@ -8309,10 +8378,6 @@ packages: resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} engines: {node: '>=18'} - retry@0.12.0: - resolution: {integrity: 
sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} - engines: {node: '>= 4'} - reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -9578,6 +9643,9 @@ packages: resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==} engines: {node: '>=18'} + weak-lru-cache@1.2.2: + resolution: {integrity: sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==} + web-vitals@5.1.0: resolution: {integrity: sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg==} @@ -11987,6 +12055,24 @@ snapshots: zod: 3.25.76 zod-to-json-schema: 3.25.0(zod@3.25.76) + '@lmdb/lmdb-darwin-arm64@3.2.6': + optional: true + + '@lmdb/lmdb-darwin-x64@3.2.6': + optional: true + + '@lmdb/lmdb-linux-arm64@3.2.6': + optional: true + + '@lmdb/lmdb-linux-arm@3.2.6': + optional: true + + '@lmdb/lmdb-linux-x64@3.2.6': + optional: true + + '@lmdb/lmdb-win32-x64@3.2.6': + optional: true + '@manypkg/find-root@1.1.0': dependencies: '@babel/runtime': 7.28.4 @@ -12028,6 +12114,24 @@ snapshots: transitivePeerDependencies: - supports-color + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + optional: true + '@napi-rs/wasm-runtime@0.2.12': dependencies: '@emnapi/core': 1.7.1 @@ -13707,10 +13811,6 @@ snapshots: '@types/node': 20.19.25 xmlbuilder: 15.1.1 - '@types/proper-lockfile@4.1.4': - dependencies: - '@types/retry': 0.12.5 - '@types/qs@6.14.0': {} '@types/range-parser@1.2.7': {} @@ -13737,8 +13837,6 @@ snapshots: '@types/resolve@1.20.2': {} - '@types/retry@0.12.5': {} - '@types/send@0.17.6': dependencies: '@types/mime': 1.3.5 @@ -15445,8 +15543,8 @@ snapshots: '@next/eslint-plugin-next': 16.0.3 eslint: 9.39.1(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.39.1(jiti@2.6.1)) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@2.6.1)) + eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.39.1(jiti@2.6.1)) eslint-plugin-react: 7.37.5(eslint@9.39.1(jiti@2.6.1)) eslint-plugin-react-hooks: 7.0.1(eslint@9.39.1(jiti@2.6.1)) @@ -15468,7 +15566,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0)(eslint@9.39.1(jiti@2.6.1)): + 
eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)): dependencies: '@nolyfill/is-core-module': 1.0.39 debug: 4.4.3 @@ -15479,22 +15577,22 @@ snapshots: tinyglobby: 0.2.15 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@2.6.1)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)) transitivePeerDependencies: - supports-color - eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@2.6.1)): + eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)): dependencies: debug: 3.2.7 optionalDependencies: '@typescript-eslint/parser': 8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3) eslint: 9.39.1(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.39.1(jiti@2.6.1)) + eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)) transitivePeerDependencies: - supports-color - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@2.6.1)): + eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.9 @@ -15505,7 +15603,7 @@ snapshots: doctrine: 2.1.0 eslint: 9.39.1(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@2.6.1)) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)))(eslint@9.39.1(jiti@2.6.1)) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 @@ -16931,6 +17029,21 @@ snapshots: rfdc: 1.4.1 wrap-ansi: 9.0.2 + lmdb@3.2.6: + dependencies: + msgpackr: 1.11.8 + node-addon-api: 6.1.0 + 
node-gyp-build-optional-packages: 5.2.2 + ordered-binary: 1.6.1 + weak-lru-cache: 1.2.2 + optionalDependencies: + '@lmdb/lmdb-darwin-arm64': 3.2.6 + '@lmdb/lmdb-darwin-x64': 3.2.6 + '@lmdb/lmdb-linux-arm': 3.2.6 + '@lmdb/lmdb-linux-arm64': 3.2.6 + '@lmdb/lmdb-linux-x64': 3.2.6 + '@lmdb/lmdb-win32-x64': 3.2.6 + load-tsconfig@0.2.5: {} locate-path@3.0.0: @@ -17557,6 +17670,22 @@ snapshots: ms@2.1.3: {} + msgpackr-extract@3.0.3: + dependencies: + node-gyp-build-optional-packages: 5.2.2 + optionalDependencies: + '@msgpackr-extract/msgpackr-extract-darwin-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-darwin-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.3 + optional: true + + msgpackr@1.11.8: + optionalDependencies: + msgpackr-extract: 3.0.3 + mute-stream@2.0.0: {} mute-stream@3.0.0: {} @@ -17681,12 +17810,18 @@ snapshots: - '@babel/core' - babel-plugin-macros + node-addon-api@6.1.0: {} + node-fetch@2.7.0(encoding@0.1.13): dependencies: whatwg-url: 5.0.0 optionalDependencies: encoding: 0.1.13 + node-gyp-build-optional-packages@5.2.2: + dependencies: + detect-libc: 2.1.2 + node-machine-id@1.1.12: {} node-releases@2.0.27: {} @@ -17851,6 +17986,8 @@ snapshots: string-width: 7.2.0 strip-ansi: 7.1.2 + ordered-binary@1.6.1: {} + os-tmpdir@1.0.2: {} outdent@0.5.0: {} @@ -18327,12 +18464,6 @@ snapshots: object-assign: 4.1.1 react-is: 16.13.1 - proper-lockfile@4.1.2: - dependencies: - graceful-fs: 4.2.11 - retry: 0.12.0 - signal-exit: 3.0.7 - property-information@7.1.0: {} proxy-addr@2.0.7: @@ -18570,8 +18701,6 @@ snapshots: onetime: 7.0.0 signal-exit: 4.1.0 - retry@0.12.0: {} - reusify@1.1.0: {} rfdc@1.4.1: {} @@ -20453,6 +20582,8 @@ snapshots: dependencies: xml-name-validator: 5.0.0 + weak-lru-cache@1.2.2: {} + web-vitals@5.1.0: {} webidl-conversions@3.0.1: {} From dc7bd4631da526ec05b3379831246991d7900ab5 Mon Sep 17 00:00:00 2001 From: AndreyHirsa <58431746+AndreyHirsa@users.noreply.github.com> Date: Tue, 3 Feb 2026 17:35:34 +0300 Subject: [PATCH 2/5] fix(new-compiler): use correct assertion for sync constructor throw Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- packages/new-compiler/src/metadata/manager.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/new-compiler/src/metadata/manager.test.ts b/packages/new-compiler/src/metadata/manager.test.ts index cc6794061..7858cbce4 100644 --- a/packages/new-compiler/src/metadata/manager.test.ts +++ b/packages/new-compiler/src/metadata/manager.test.ts @@ -309,7 +309,7 @@ describe("MetadataManager", () => { describe("error handling", () => { it("should throw descriptive error for invalid path", async () => { const invalidPath = "/root/definitely/cannot/create/this/path"; - await expect(async () => new MetadataManager(invalidPath)).rejects.toThrow(); + expect(() => new MetadataManager(invalidPath)).toThrow(); }); }); From b2f695fefbb9965bea065764d3b752528385f005 Mon Sep 17 00:00:00 2001 From: AndreyHirsa Date: Tue, 3 Feb 2026 20:05:46 +0300 Subject: [PATCH 3/5] fix(new-compiler): remove await from sync cleanupExistingMetadata calls --- .../new-compiler/src/metadata/manager.test.ts | 45 +++++++++++++------ 1 file changed, 32 insertions(+), 13 deletions(-) diff --git a/packages/new-compiler/src/metadata/manager.test.ts b/packages/new-compiler/src/metadata/manager.test.ts index 
7858cbce4..833607301 100644 --- a/packages/new-compiler/src/metadata/manager.test.ts +++ b/packages/new-compiler/src/metadata/manager.test.ts @@ -63,7 +63,10 @@ function sendToWorker( message: WorkerMessage, ): Promise { return new Promise((resolve, reject) => { - const timeout = setTimeout(() => reject(new Error("Worker timeout")), 10000); + const timeout = setTimeout( + () => reject(new Error("Worker timeout")), + 10000, + ); worker.once("message", (response: WorkerResponse) => { clearTimeout(timeout); resolve(response); @@ -103,8 +106,8 @@ describe("MetadataManager", () => { testDbPath = createUniqueDbPath(); }); - afterEach(async () => { - await cleanupExistingMetadata(testDbPath); + afterEach(() => { + cleanupExistingMetadata(testDbPath); }); describe("createEmptyMetadata", () => { @@ -200,7 +203,10 @@ describe("MetadataManager", () => { // Many saves with overlapping keys for (let i = 0; i < 50; i++) { await manager.saveMetadataWithEntries([ - createTestEntry({ hash: `persistent-${i % 10}`, sourceText: `v${i}` }), + createTestEntry({ + hash: `persistent-${i % 10}`, + sourceText: `v${i}`, + }), createTestEntry({ hash: `unique-${i}` }), ]); } @@ -237,10 +243,12 @@ describe("MetadataManager", () => { describe("cleanupExistingMetadata", () => { it("should remove database and allow reopening with fresh state", async () => { const manager1 = new MetadataManager(testDbPath); - await manager1.saveMetadataWithEntries([createTestEntry({ hash: "before" })]); + await manager1.saveMetadataWithEntries([ + createTestEntry({ hash: "before" }), + ]); expect(fs.existsSync(testDbPath)).toBe(true); - await cleanupExistingMetadata(testDbPath); + cleanupExistingMetadata(testDbPath); expect(fs.existsSync(testDbPath)).toBe(false); // Should work with fresh state after cleanup @@ -290,18 +298,22 @@ describe("MetadataManager", () => { try { const manager1 = new MetadataManager(path1); - await manager1.saveMetadataWithEntries([createTestEntry({ hash: "in-path1" })]); + await manager1.saveMetadataWithEntries([ + createTestEntry({ hash: "in-path1" }), + ]); const manager2 = new MetadataManager(path2); - await manager2.saveMetadataWithEntries([createTestEntry({ hash: "in-path2" })]); + await manager2.saveMetadataWithEntries([ + createTestEntry({ hash: "in-path2" }), + ]); // Each database has its own data const result2 = manager2.loadMetadata(); expect(result2.entries["in-path2"]).toBeDefined(); expect(result2.entries["in-path1"]).toBeUndefined(); } finally { - await cleanupExistingMetadata(path1); - await cleanupExistingMetadata(path2); + cleanupExistingMetadata(path1); + cleanupExistingMetadata(path2); } }); }); @@ -335,7 +347,10 @@ describe("MetadataManager", () => { entries: [{ hash: "from-process-2" }], }); - const read = await sendToWorker(worker1, { type: "read", dbPath: testDbPath }); + const read = await sendToWorker(worker1, { + type: "read", + dbPath: testDbPath, + }); expect(read.totalEntries).toBe(2); expect(read.entries?.["from-process-1"]).toBeDefined(); expect(read.entries?.["from-process-2"]).toBeDefined(); @@ -354,10 +369,14 @@ describe("MetadataManager", () => { ]); // Main runner cleans up (simulates Next.js build start) - await cleanupExistingMetadata(testDbPath); + cleanupExistingMetadata(testDbPath); // Spawn workers (simulates Next.js build workers) - const workers = await Promise.all([spawnWorker(), spawnWorker(), spawnWorker()]); + const workers = await Promise.all([ + spawnWorker(), + spawnWorker(), + spawnWorker(), + ]); try { // Workers write concurrently From 
1bfb5ea86381922e85887860b540cc2836d1fd18 Mon Sep 17 00:00:00 2001 From: AndreyHirsa Date: Wed, 4 Feb 2026 00:42:27 +0300 Subject: [PATCH 4/5] refactor(new-compiler): replace MetadataManager class with pure functions --- .changeset/common-teeth-reply.md | 2 +- .../docs/TRANSLATION_ARCHITECTURE.md | 8 +- .../new-compiler/src/metadata/manager.test.ts | 247 ++------------- .../src/metadata/manager.test.worker.ts | 87 ------ packages/new-compiler/src/metadata/manager.ts | 283 +++++++----------- .../src/plugin/build-translator.ts | 13 +- .../src/plugin/next-compiler-loader.ts | 6 +- packages/new-compiler/src/plugin/unplugin.ts | 6 +- .../translation-server/translation-server.ts | 10 +- 9 files changed, 172 insertions(+), 490 deletions(-) delete mode 100644 packages/new-compiler/src/metadata/manager.test.worker.ts diff --git a/.changeset/common-teeth-reply.md b/.changeset/common-teeth-reply.md index fec8b4bc6..c6adb6c24 100644 --- a/.changeset/common-teeth-reply.md +++ b/.changeset/common-teeth-reply.md @@ -4,6 +4,6 @@ - Migrate metadata storage from JSON files to LMDB - New storage locations: .lingo/metadata-dev/ and .lingo/metadata-build/ +- Use pure functions with short-lived connections for multi-worker safety - Update compiler docs - Remove proper-lockfile dependency -- New tests for MetadataManager diff --git a/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md b/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md index f7f78058b..c05dde93b 100644 --- a/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md +++ b/packages/new-compiler/docs/TRANSLATION_ARCHITECTURE.md @@ -8,7 +8,7 @@ metadata management, translation execution, and caching. ## Architectural Principles 1. **Metadata storage** is only known by: - - Metadata Manager (reads/writes LMDB database) + - Metadata functions (reads/writes LMDB database) - Translation Service (orchestrator that coordinates everything) 2. **Translators are stateless** and work with abstract `TranslatableEntry` types @@ -36,9 +36,9 @@ metadata management, translation execution, and caching. 
│ writes ↓ ┌──────────────────────────────────────────────────┐ -│ MetadataManager │ -│ - ONLY component that reads/writes LMDB database│ -│ - Provides metadata loading/saving │ +│ Metadata Functions (saveMetadata/loadMetadata) │ +│ - Pure functions for LMDB database access │ +│ - Short-lived connections (multi-worker safe) │ │ - Returns TranslationEntry[] │ └────────────────┬─────────────────────────────────┘ │ reads from diff --git a/packages/new-compiler/src/metadata/manager.test.ts b/packages/new-compiler/src/metadata/manager.test.ts index 833607301..19a0d6784 100644 --- a/packages/new-compiler/src/metadata/manager.test.ts +++ b/packages/new-compiler/src/metadata/manager.test.ts @@ -2,79 +2,15 @@ import { describe, it, expect, beforeEach, afterEach } from "vitest"; import fs from "fs"; import path from "path"; import os from "os"; -import { fork, type ChildProcess } from "child_process"; import { - MetadataManager, createEmptyMetadata, loadMetadata, + saveMetadata, cleanupExistingMetadata, getMetadataPath, } from "./manager"; import type { TranslationEntry } from "../types"; -// Worker helper for multi-process tests -const WORKER_PATH = path.join(__dirname, "manager.test.worker.ts"); - -interface WorkerMessage { - type: "write" | "read" | "write-batch"; - dbPath: string; - workerId?: string; - entries?: Array<{ hash: string; sourceText?: string }>; -} - -interface WorkerResponse { - type: "success" | "error" | "ready"; - workerId?: string; - totalEntries?: number; - entries?: Record; - error?: string; -} - -function spawnWorker(): Promise { - return new Promise((resolve, reject) => { - const worker = fork(WORKER_PATH, [], { - execPath: path.join(__dirname, "../../node_modules/.bin/tsx"), - stdio: ["pipe", "pipe", "pipe", "ipc"], - }); - - const timeout = setTimeout(() => { - worker.kill(); - reject(new Error("Worker spawn timeout")); - }, 5000); - - worker.once("message", (msg: WorkerResponse) => { - clearTimeout(timeout); - if (msg.type === "ready") { - resolve(worker); - } else { - reject(new Error("Worker did not signal ready")); - } - }); - - worker.once("error", (err) => { - clearTimeout(timeout); - reject(err); - }); - }); -} - -function sendToWorker( - worker: ChildProcess, - message: WorkerMessage, -): Promise { - return new Promise((resolve, reject) => { - const timeout = setTimeout( - () => reject(new Error("Worker timeout")), - 10000, - ); - worker.once("message", (response: WorkerResponse) => { - clearTimeout(timeout); - resolve(response); - }); - worker.send(message); - }); -} - function createTestEntry( overrides: Partial & { hash?: string; @@ -99,7 +35,7 @@ function createUniqueDbPath(): string { ); } -describe("MetadataManager", () => { +describe("metadata", () => { let testDbPath: string; beforeEach(() => { @@ -123,14 +59,13 @@ describe("MetadataManager", () => { }); describe("loadMetadata", () => { - it("should return empty metadata for new database", () => { - const metadata = loadMetadata(testDbPath); + it("should return empty metadata for new database", async () => { + const metadata = await loadMetadata(testDbPath); expect(metadata.entries).toEqual({}); expect(metadata.stats!.totalEntries).toBe(0); }); it("should load and preserve all entry fields", async () => { - const manager = new MetadataManager(testDbPath); const entry: TranslationEntry = { type: "content", hash: "full-entry", @@ -139,70 +74,64 @@ describe("MetadataManager", () => { location: { filePath: "app.tsx", line: 42, column: 10 }, }; - await manager.saveMetadataWithEntries([entry]); - const metadata 
= loadMetadata(testDbPath); + await saveMetadata(testDbPath, [entry]); + const metadata = await loadMetadata(testDbPath); expect(metadata.entries["full-entry"]).toEqual(entry); expect(metadata.stats!.totalEntries).toBe(1); }); it("should handle entries with very long sourceText", async () => { - const manager = new MetadataManager(testDbPath); const longText = "A".repeat(100000); - await manager.saveMetadataWithEntries([ + await saveMetadata(testDbPath, [ createTestEntry({ hash: "long-text", sourceText: longText }), ]); - const metadata = manager.loadMetadata(); + const metadata = await loadMetadata(testDbPath); expect(metadata.entries["long-text"].sourceText).toBe(longText); }); }); - describe("saveMetadataWithEntries", () => { + describe("saveMetadata", () => { it("should save, accumulate, and update entries correctly", async () => { - const manager = new MetadataManager(testDbPath); - // Save single entry - await manager.saveMetadataWithEntries([ + await saveMetadata(testDbPath, [ createTestEntry({ hash: "entry-1", sourceText: "v1" }), ]); - expect(manager.loadMetadata().stats!.totalEntries).toBe(1); + expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(1); // Accumulate multiple entries - await manager.saveMetadataWithEntries([ + await saveMetadata(testDbPath, [ createTestEntry({ hash: "entry-2" }), createTestEntry({ hash: "entry-3" }), ]); - expect(manager.loadMetadata().stats!.totalEntries).toBe(3); + expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(3); // Update existing entry (count should not increase) - const result = await manager.saveMetadataWithEntries([ + const result = await saveMetadata(testDbPath, [ createTestEntry({ hash: "entry-1", sourceText: "v2" }), ]); expect(result.stats!.totalEntries).toBe(3); expect(result.entries["entry-1"].sourceText).toBe("v2"); // Empty array should not change anything - await manager.saveMetadataWithEntries([]); - expect(manager.loadMetadata().stats!.totalEntries).toBe(3); + await saveMetadata(testDbPath, []); + expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(3); }); it("should handle large batch of entries", async () => { - const manager = new MetadataManager(testDbPath); const entries = Array.from({ length: 1000 }, (_, i) => createTestEntry({ hash: `batch-${i}` }), ); - const result = await manager.saveMetadataWithEntries(entries); + const result = await saveMetadata(testDbPath, entries); expect(result.stats!.totalEntries).toBe(1000); }); it("should maintain data integrity after many operations", async () => { - const manager = new MetadataManager(testDbPath); - // Many saves with overlapping keys for (let i = 0; i < 50; i++) { - await manager.saveMetadataWithEntries([ + await saveMetadata(testDbPath, [ createTestEntry({ hash: `persistent-${i % 10}`, sourceText: `v${i}`, @@ -211,49 +140,42 @@ describe("MetadataManager", () => { ]); } - const final = manager.loadMetadata(); + const final = await loadMetadata(testDbPath); // 10 persistent + 50 unique = 60 expect(final.stats!.totalEntries).toBe(60); // Verify save result matches load result - const saveResult = await manager.saveMetadataWithEntries([]); + const saveResult = await saveMetadata(testDbPath, []); expect(saveResult.stats!.totalEntries).toBe(final.stats!.totalEntries); }); }); describe("concurrent access (single process)", () => { - it("should handle concurrent operations from multiple manager instances", async () => { - const manager1 = new MetadataManager(testDbPath); - const manager2 = new MetadataManager(testDbPath); - - // 
Concurrent writes - const promises = Array.from({ length: 20 }, (_, i) => - (i % 2 === 0 ? manager1 : manager2).saveMetadataWithEntries([ + it("should handle concurrent operations from multiple calls", async () => { + // LMDB handles concurrent writes via OS-level locking + const promises = Array.from({ length: 20 }, async (_, i) => { + await saveMetadata(testDbPath, [ createTestEntry({ hash: `concurrent-${i}` }), - ]), - ); + ]); + }); await Promise.all(promises); - // Both managers should see all entries - expect(manager1.loadMetadata().stats!.totalEntries).toBe(20); - expect(manager2.loadMetadata().stats!.totalEntries).toBe(20); + // Verify all entries are present + expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(20); }); }); describe("cleanupExistingMetadata", () => { it("should remove database and allow reopening with fresh state", async () => { - const manager1 = new MetadataManager(testDbPath); - await manager1.saveMetadataWithEntries([ - createTestEntry({ hash: "before" }), - ]); + await saveMetadata(testDbPath, [createTestEntry({ hash: "before" })]); expect(fs.existsSync(testDbPath)).toBe(true); + // Cleanup should succeed because saveMetadata closes the DB cleanupExistingMetadata(testDbPath); expect(fs.existsSync(testDbPath)).toBe(false); // Should work with fresh state after cleanup - const manager2 = new MetadataManager(testDbPath); - const metadata = manager2.loadMetadata(); + const metadata = await loadMetadata(testDbPath); expect(metadata.entries["before"]).toBeUndefined(); expect(metadata.stats!.totalEntries).toBe(0); }); @@ -291,115 +213,10 @@ describe("MetadataManager", () => { }); }); - describe("singleton database connection", () => { - it("should close previous db when switching paths", async () => { - const path1 = createUniqueDbPath(); - const path2 = createUniqueDbPath(); - - try { - const manager1 = new MetadataManager(path1); - await manager1.saveMetadataWithEntries([ - createTestEntry({ hash: "in-path1" }), - ]); - - const manager2 = new MetadataManager(path2); - await manager2.saveMetadataWithEntries([ - createTestEntry({ hash: "in-path2" }), - ]); - - // Each database has its own data - const result2 = manager2.loadMetadata(); - expect(result2.entries["in-path2"]).toBeDefined(); - expect(result2.entries["in-path1"]).toBeUndefined(); - } finally { - cleanupExistingMetadata(path1); - cleanupExistingMetadata(path2); - } - }); - }); - describe("error handling", () => { it("should throw descriptive error for invalid path", async () => { const invalidPath = "/root/definitely/cannot/create/this/path"; - expect(() => new MetadataManager(invalidPath)).toThrow(); - }); - }); - - describe("multi-process scenarios (Next.js-like)", () => { - // These tests spawn actual child processes to simulate Next.js workers. - // Each process has its own singleton DatabaseConnection. - // LMDB handles cross-process concurrency via OS-level locking (MVCC). 
- - it("should share data between separate processes", async () => { - const worker1 = await spawnWorker(); - const worker2 = await spawnWorker(); - - try { - await sendToWorker(worker1, { - type: "write", - dbPath: testDbPath, - entries: [{ hash: "from-process-1" }], - }); - - await sendToWorker(worker2, { - type: "write", - dbPath: testDbPath, - entries: [{ hash: "from-process-2" }], - }); - - const read = await sendToWorker(worker1, { - type: "read", - dbPath: testDbPath, - }); - expect(read.totalEntries).toBe(2); - expect(read.entries?.["from-process-1"]).toBeDefined(); - expect(read.entries?.["from-process-2"]).toBeDefined(); - } finally { - worker1.kill(); - worker2.kill(); - } - }); - - it("should simulate Next.js build: cleanup then multi-worker writes", async () => { - // Pre-populate with "old" data - const setup = new MetadataManager(testDbPath); - await setup.saveMetadataWithEntries([ - createTestEntry({ hash: "old-entry-1" }), - createTestEntry({ hash: "old-entry-2" }), - ]); - - // Main runner cleans up (simulates Next.js build start) - cleanupExistingMetadata(testDbPath); - - // Spawn workers (simulates Next.js build workers) - const workers = await Promise.all([ - spawnWorker(), - spawnWorker(), - spawnWorker(), - ]); - - try { - // Workers write concurrently - await Promise.all( - workers.map((worker, i) => - sendToWorker(worker, { - type: "write-batch", - dbPath: testDbPath, - entries: Array.from({ length: 5 }, (_, j) => ({ - hash: `worker${i}-file${j}`, - })), - }), - ), - ); - - // Main process reads final metadata - const finalMetadata = loadMetadata(testDbPath); - expect(finalMetadata.stats!.totalEntries).toBe(15); - expect(finalMetadata.entries["old-entry-1"]).toBeUndefined(); - expect(finalMetadata.entries["worker0-file0"]).toBeDefined(); - } finally { - workers.forEach((w) => w.kill()); - } + await expect(loadMetadata(invalidPath)).rejects.toThrow(); }); }); }); diff --git a/packages/new-compiler/src/metadata/manager.test.worker.ts b/packages/new-compiler/src/metadata/manager.test.worker.ts deleted file mode 100644 index 5978e7943..000000000 --- a/packages/new-compiler/src/metadata/manager.test.worker.ts +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Worker script for multi-process LMDB tests. - * This file is forked by manager.test.ts to simulate Next.js workers. - * - * Each forked process has its own singleton DatabaseConnection, - * testing true cross-process LMDB behavior. - */ -import { MetadataManager, loadMetadata } from "./manager"; -import type { TranslationEntry } from "../types"; - -interface WorkerMessage { - type: "write" | "read" | "write-batch"; - dbPath: string; - workerId?: string; - entries?: Array<{ hash: string; sourceText?: string }>; -} - -interface WorkerResponse { - type: "success" | "error"; - workerId?: string; - totalEntries?: number; - entries?: Record; - error?: string; -} - -function createEntry(hash: string, sourceText?: string): TranslationEntry { - return { - type: "content", - hash, - sourceText: sourceText ?? `Source text for ${hash}`, - context: { - filePath: "test.tsx", - componentName: "TestComponent", - }, - location: { - filePath: "test.tsx", - line: 1, - column: 1, - }, - } as TranslationEntry; -} - -async function handleMessage(message: WorkerMessage): Promise { - try { - const manager = new MetadataManager(message.dbPath); - - if (message.type === "write" || message.type === "write-batch") { - const entries = (message.entries ?? 
[]).map((e) => - createEntry(e.hash, e.sourceText), - ); - await manager.saveMetadataWithEntries(entries); - const metadata = manager.loadMetadata(); - return { - type: "success", - workerId: message.workerId, - totalEntries: metadata.stats?.totalEntries ?? 0, - }; - } - - if (message.type === "read") { - const metadata = loadMetadata(message.dbPath); - return { - type: "success", - workerId: message.workerId, - totalEntries: metadata.stats?.totalEntries ?? 0, - entries: metadata.entries, - }; - } - - return { type: "error", error: "Unknown message type" }; - } catch (error) { - return { - type: "error", - workerId: message.workerId, - error: error instanceof Error ? error.message : String(error), - }; - } -} - -// Handle messages from parent process -process.on("message", async (message: WorkerMessage) => { - const response = await handleMessage(message); - process.send!(response); -}); - -// Signal ready -process.send!({ type: "ready" }); diff --git a/packages/new-compiler/src/metadata/manager.ts b/packages/new-compiler/src/metadata/manager.ts index 489e826ab..5c590ea26 100644 --- a/packages/new-compiler/src/metadata/manager.ts +++ b/packages/new-compiler/src/metadata/manager.ts @@ -13,69 +13,65 @@ const METADATA_DIR_DEV = "metadata-dev"; const METADATA_DIR_BUILD = "metadata-build"; /** - * Singleton class managing the LMDB database connection. - * Only one database is ever open at a time (either dev or build). - * - * Note: In multi-process bundler environments (e.g., Webpack workers), - * each process has its own singleton instance. LMDB handles concurrent - * access across processes safely via OS-level locking (MVCC). + * Opens an LMDB database connection at the given path. */ -class DatabaseConnection { - private static instance: { db: RootDatabase; path: string } | null = null; +function openDatabase(dbPath: string): RootDatabase { + fs.mkdirSync(dbPath, { recursive: true }); + + // Build mode: disable fsync - metadata is deleted immediately after build, so durability is not needed. + // Dev mode: keep sync enabled for consistency during long-running sessions. + const isBuildMode = dbPath.endsWith(METADATA_DIR_BUILD); + + try { + return open({ + path: dbPath, + compression: true, + noSync: isBuildMode, + }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new Error( + `Failed to open LMDB metadata database at ${dbPath}. Error: ${message}`, + ); + } +} - static get(dbPath: string): RootDatabase { - // Return existing db if same path - if (this.instance?.path === dbPath) { - return this.instance.db; - } +/** + * Safely close database connection. + */ +async function closeDatabase(db: RootDatabase, dbPath: string): Promise { + try { + await db.close(); + } catch (e) { + logger.debug(`Error closing database at ${dbPath}: ${e}`); + } +} - // Close previous db if different path - if (this.instance) { - try { - this.instance.db.close(); - } catch (e) { - logger.debug(`Error closing previous database: ${e}`); - } - this.instance = null; - } +/** + * Read all entries from an open database. + * Internal helper - does not manage connection lifecycle. + */ +function readEntriesFromDb(db: RootDatabase): MetadataSchema { + const entries: Record = {}; - fs.mkdirSync(dbPath, { recursive: true }); - - // Build mode: disable fsync - metadata is deleted immediately after build, - // so durability is not needed and this avoids delay on close. - // Dev mode: keep sync enabled for consistency during long-running sessions. 
- const isBuildMode = dbPath.endsWith(METADATA_DIR_BUILD); - - try { - const db = open({ - path: dbPath, - compression: true, - noSync: isBuildMode, - }); - - this.instance = { db, path: dbPath }; - return db; - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - throw new Error( - `Failed to open LMDB metadata database at ${dbPath}. ` + - `This may be caused by disk space issues, permission problems, or database corruption. ` + - `Try deleting the ${dbPath} directory and rebuilding. ` + - `Original error: ${message}`, - ); + for (const { key, value } of db.getRange()) { + const keyStr = key as string; + if (keyStr !== STATS_KEY) { + entries[keyStr] = value as TranslationEntry; } } - static close(dbPath: string): void { - if (this.instance?.path === dbPath) { - try { - this.instance.db.close(); - } catch (e) { - logger.debug(`Error closing database at ${dbPath}: ${e}`); - } - this.instance = null; - } + const stats = db.get(STATS_KEY) as MetadataSchema["stats"] | undefined; + if (Object.keys(entries).length === 0 && !stats) { + return createEmptyMetadata(); } + return { + entries, + stats: stats || { + totalEntries: Object.keys(entries).length, + lastUpdated: new Date().toISOString(), + }, + }; } export function createEmptyMetadata(): MetadataSchema { @@ -88,63 +84,83 @@ export function createEmptyMetadata(): MetadataSchema { }; } -export function loadMetadata(dbPath: string): MetadataSchema { - return new MetadataManager(dbPath).loadMetadata(); +/** + * Load metadata from LMDB database. + */ +export async function loadMetadata(dbPath: string): Promise { + const db = openDatabase(dbPath); + try { + return readEntriesFromDb(db); + } finally { + await closeDatabase(db, dbPath); + } } /** - * Synchronous sleep using Atomics.wait(). - * Blocks the thread without consuming CPU cycles. + * Save translation entries to the metadata database. + * + * LMDB handles concurrency via MVCC, so multiple processes can write safely. + * + * @param dbPath - Path to the LMDB database directory + * @param entries - Translation entries to add/update + * @returns The updated metadata schema */ -function sleepSync(ms: number): void { - const sharedBuffer = new SharedArrayBuffer(4); - const int32 = new Int32Array(sharedBuffer); - Atomics.wait(int32, 0, 0, ms); +export async function saveMetadata( + dbPath: string, + entries: TranslationEntry[], +): Promise { + const db = openDatabase(dbPath); + try { + await db.transaction(() => { + for (const entry of entries) { + db.put(entry.hash, entry); + } + + // Count entries explicitly (excluding stats key) for clarity + let entryCount = 0; + for (const { key } of db.getRange()) { + if (key !== STATS_KEY) { + entryCount++; + } + } + + const stats = { + totalEntries: entryCount, + lastUpdated: new Date().toISOString(), + }; + db.put(STATS_KEY, stats); + }); + + return readEntriesFromDb(db); + } finally { + await closeDatabase(db, dbPath); + } } /** * Clean up the metadata database directory. - * Synchronous to work with process exit handlers. - * Uses Atomics.wait() for non-busy-wait blocking during retries on Windows. 
*/ export function cleanupExistingMetadata(metadataDbPath: string): void { - logger.debug(`Attempting to cleanup metadata database: ${metadataDbPath}`); - - // Close database if open to release locks - DatabaseConnection.close(metadataDbPath); - - // Retry a few times - on Windows, memory-mapped files may be held briefly by other processes - const maxRetries = 5; - - for (let attempt = 1; attempt <= maxRetries; attempt++) { - try { - fs.rmSync(metadataDbPath, { recursive: true, force: true }); - logger.info(`🧹 Cleaned up metadata database: ${metadataDbPath}`); + logger.debug(`Cleaning up metadata database: ${metadataDbPath}`); + + try { + fs.rmSync(metadataDbPath, { recursive: true, force: true }); + logger.info(`🧹 Cleaned up metadata database: ${metadataDbPath}`); + } catch (error) { + const code = + error instanceof Error && "code" in error + ? (error as NodeJS.ErrnoException).code + : undefined; + const message = error instanceof Error ? error.message : String(error); + + if (code === "ENOENT") { + logger.debug( + `Metadata database already deleted or doesn't exist: ${metadataDbPath}`, + ); return; - } catch (error) { - const code = - error instanceof Error && "code" in error - ? (error as NodeJS.ErrnoException).code - : undefined; - const message = error instanceof Error ? error.message : String(error); - - if (code === "ENOENT") { - // Ignore if file doesn't exist - logger.debug( - `Metadata database already deleted or doesn't exist: ${metadataDbPath}`, - ); - return; - } - - const isRetryable = code === "EBUSY" || code === "EPERM"; - if (isRetryable && attempt < maxRetries) { - logger.debug(`Cleanup attempt ${attempt} failed, retrying...`); - sleepSync(200); - continue; - } - - logger.warn(`Failed to cleanup metadata database: ${message}`); } + + logger.warn(`Failed to cleanup metadata database: ${message}`); } } @@ -161,70 +177,3 @@ export function getMetadataPath(config: PathConfig): string { : METADATA_DIR_BUILD; return path.join(getLingoDir(config), dirname); } - -export class MetadataManager { - private db: RootDatabase; - - constructor(dbPath: string) { - this.db = DatabaseConnection.get(dbPath); - } - - /** - * Load metadata from LMDB database. - * Returns empty metadata if database is empty. - */ - loadMetadata(): MetadataSchema { - const entries: Record = {}; - - for (const { key, value } of this.db.getRange()) { - const keyStr = key as string; - if (keyStr !== STATS_KEY) { - entries[keyStr] = value as TranslationEntry; - } - } - - const stats = this.db.get(STATS_KEY) as MetadataSchema["stats"] | undefined; - if (Object.keys(entries).length === 0 && !stats) { - return createEmptyMetadata(); - } - return { - entries, - stats: stats || { - totalEntries: Object.keys(entries).length, - lastUpdated: new Date().toISOString(), - }, - }; - } - - /** - * Save entries to database - LMDB handles concurrency via MVCC. 
- * - * @param entries - Translation entries to add/update - * @returns The updated metadata schema - */ - async saveMetadataWithEntries( - entries: TranslationEntry[], - ): Promise { - await this.db.transaction(() => { - for (const entry of entries) { - this.db.put(entry.hash, entry); - } - - // Count entries explicitly (excluding stats key) for clarity - let entryCount = 0; - for (const { key } of this.db.getRange()) { - if (key !== STATS_KEY) { - entryCount++; - } - } - - const stats = { - totalEntries: entryCount, - lastUpdated: new Date().toISOString(), - }; - this.db.put(STATS_KEY, stats); - }); - - return this.loadMetadata(); - } -} diff --git a/packages/new-compiler/src/plugin/build-translator.ts b/packages/new-compiler/src/plugin/build-translator.ts index 7a04adb2b..a46f8fc5c 100644 --- a/packages/new-compiler/src/plugin/build-translator.ts +++ b/packages/new-compiler/src/plugin/build-translator.ts @@ -11,9 +11,16 @@ import fs from "fs/promises"; import path from "path"; import type { LingoConfig, MetadataSchema } from "../types"; import { logger } from "../utils/logger"; -import { startTranslationServer, type TranslationServer, } from "../translation-server"; +import { + startTranslationServer, + type TranslationServer, +} from "../translation-server"; import { loadMetadata } from "../metadata/manager"; -import { createCache, type TranslationCache, TranslationService, } from "../translators"; +import { + createCache, + type TranslationCache, + TranslationService, +} from "../translators"; import { dictionaryFrom } from "../translators/api"; import type { LocaleCode } from "lingo.dev/spec"; @@ -64,7 +71,7 @@ export async function processBuildTranslations( logger.info(`🌍 Build mode: ${buildMode}`); - const metadata = loadMetadata(metadataFilePath); + const metadata = await loadMetadata(metadataFilePath); if (!metadata || Object.keys(metadata.entries).length === 0) { logger.info("No translations to process (metadata is empty)"); diff --git a/packages/new-compiler/src/plugin/next-compiler-loader.ts b/packages/new-compiler/src/plugin/next-compiler-loader.ts index 04eaeb033..f30d1d7ee 100644 --- a/packages/new-compiler/src/plugin/next-compiler-loader.ts +++ b/packages/new-compiler/src/plugin/next-compiler-loader.ts @@ -1,7 +1,7 @@ import type { LingoConfig } from "../types"; import { transformComponent } from "./transform"; import { logger } from "../utils/logger"; -import { MetadataManager } from "../metadata/manager"; +import { saveMetadata } from "../metadata/manager"; /** * Turbopack/Webpack loader for automatic translation @@ -28,8 +28,6 @@ export default async function nextCompilerLoader( const config: LingoConfig & { metadataFilePath: string } = this.getOptions(); - const metadataManager = new MetadataManager(config.metadataFilePath); - logger.debug(`[Turbopack Loader] Processing: ${this.resourcePath}`); // Transform the component @@ -46,7 +44,7 @@ export default async function nextCompilerLoader( // Update metadata with new entries if (result.newEntries && result.newEntries.length > 0) { - await metadataManager.saveMetadataWithEntries(result.newEntries); + await saveMetadata(config.metadataFilePath, result.newEntries); logger.debug( `[Turbopack Loader] Found ${result.newEntries.length} translatable text(s) in ${this.resourcePath}`, diff --git a/packages/new-compiler/src/plugin/unplugin.ts b/packages/new-compiler/src/plugin/unplugin.ts index a0f29f68b..09d8a54cb 100644 --- a/packages/new-compiler/src/plugin/unplugin.ts +++ b/packages/new-compiler/src/plugin/unplugin.ts @@ -12,7 
+12,7 @@ import { import { cleanupExistingMetadata, getMetadataPath as rawGetMetadataPath, - MetadataManager, + saveMetadata, } from "../metadata/manager"; import { createLingoConfig } from "../utils/config-factory"; import { logger } from "../utils/logger"; @@ -353,11 +353,9 @@ export const lingoUnplugin = createUnplugin< logger.debug(`No transformation needed for ${id}`); return null; } - const metadataManager = new MetadataManager(getMetadataPath()); - // Update metadata with new entries (thread-safe) if (result.newEntries && result.newEntries.length > 0) { - await metadataManager.saveMetadataWithEntries(result.newEntries); + await saveMetadata(getMetadataPath(), result.newEntries); // Track stats for observability totalEntriesCount += result.newEntries.length; diff --git a/packages/new-compiler/src/translation-server/translation-server.ts b/packages/new-compiler/src/translation-server/translation-server.ts index 10db2a873..7c9695cd6 100644 --- a/packages/new-compiler/src/translation-server/translation-server.ts +++ b/packages/new-compiler/src/translation-server/translation-server.ts @@ -313,9 +313,9 @@ export class TranslationServer { * Reload metadata from disk * Useful when metadata has been updated during runtime (e.g., new transformations) */ - reloadMetadata(): void { + async reloadMetadata(): Promise { try { - this.metadata = loadMetadata(getMetadataPath(this.config)); + this.metadata = await loadMetadata(getMetadataPath(this.config)); this.logger.debug( `Reloaded metadata: ${Object.keys(this.metadata.entries).length} entries`, ); @@ -344,7 +344,7 @@ export class TranslationServer { // Always reload metadata to get the latest entries // This is critical for build-time translation where metadata is updated // continuously as files are transformed - this.reloadMetadata(); + await this.reloadMetadata(); if (!this.metadata) { throw new Error("Failed to load metadata"); @@ -651,7 +651,7 @@ export class TranslationServer { } // Reload metadata to ensure we have the latest entries // (new entries may have been added since server started) - this.reloadMetadata(); + await this.reloadMetadata(); if (!this.metadata) { throw new Error("Failed to load metadata"); @@ -713,7 +713,7 @@ export class TranslationServer { // Reload metadata to ensure we have the latest entries // (new entries may have been added since server started) - this.reloadMetadata(); + await this.reloadMetadata(); if (!this.metadata) { throw new Error("Failed to load metadata"); From c82ee97cbe2051f91fc406a7c917847a5d3070ed Mon Sep 17 00:00:00 2001 From: AndreyHirsa Date: Wed, 4 Feb 2026 09:18:13 +0300 Subject: [PATCH 5/5] fix(new-compiler): optimization fixes --- .../new-compiler/src/metadata/manager.test.ts | 29 +++---- packages/new-compiler/src/metadata/manager.ts | 82 +++++++------------ 2 files changed, 41 insertions(+), 70 deletions(-) diff --git a/packages/new-compiler/src/metadata/manager.test.ts b/packages/new-compiler/src/metadata/manager.test.ts index 19a0d6784..1d7346374 100644 --- a/packages/new-compiler/src/metadata/manager.test.ts +++ b/packages/new-compiler/src/metadata/manager.test.ts @@ -108,11 +108,12 @@ describe("metadata", () => { expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(3); // Update existing entry (count should not increase) - const result = await saveMetadata(testDbPath, [ + await saveMetadata(testDbPath, [ createTestEntry({ hash: "entry-1", sourceText: "v2" }), ]); - expect(result.stats!.totalEntries).toBe(3); - expect(result.entries["entry-1"].sourceText).toBe("v2"); + const 
updated = await loadMetadata(testDbPath); + expect(updated.stats!.totalEntries).toBe(3); + expect(updated.entries["entry-1"].sourceText).toBe("v2"); // Empty array should not change anything await saveMetadata(testDbPath, []); @@ -120,20 +121,20 @@ describe("metadata", () => { }); it("should handle large batch of entries", async () => { - const entries = Array.from({ length: 1000 }, (_, i) => + const entries = Array.from({ length: 100 }, (_, i) => createTestEntry({ hash: `batch-${i}` }), ); - const result = await saveMetadata(testDbPath, entries); - expect(result.stats!.totalEntries).toBe(1000); + await saveMetadata(testDbPath, entries); + expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(100); }); it("should maintain data integrity after many operations", async () => { // Many saves with overlapping keys - for (let i = 0; i < 50; i++) { + for (let i = 0; i < 10; i++) { await saveMetadata(testDbPath, [ createTestEntry({ - hash: `persistent-${i % 10}`, + hash: `persistent-${i % 5}`, sourceText: `v${i}`, }), createTestEntry({ hash: `unique-${i}` }), @@ -141,19 +142,15 @@ describe("metadata", () => { } const final = await loadMetadata(testDbPath); - // 10 persistent + 50 unique = 60 - expect(final.stats!.totalEntries).toBe(60); - - // Verify save result matches load result - const saveResult = await saveMetadata(testDbPath, []); - expect(saveResult.stats!.totalEntries).toBe(final.stats!.totalEntries); + // 5 persistent + 10 unique = 15 + expect(final.stats!.totalEntries).toBe(15); }); }); describe("concurrent access (single process)", () => { it("should handle concurrent operations from multiple calls", async () => { // LMDB handles concurrent writes via OS-level locking - const promises = Array.from({ length: 20 }, async (_, i) => { + const promises = Array.from({ length: 10 }, async (_, i) => { await saveMetadata(testDbPath, [ createTestEntry({ hash: `concurrent-${i}` }), ]); @@ -161,7 +158,7 @@ describe("metadata", () => { await Promise.all(promises); // Verify all entries are present - expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(20); + expect((await loadMetadata(testDbPath)).stats!.totalEntries).toBe(10); }); }); diff --git a/packages/new-compiler/src/metadata/manager.ts b/packages/new-compiler/src/metadata/manager.ts index 5c590ea26..e51452d51 100644 --- a/packages/new-compiler/src/metadata/manager.ts +++ b/packages/new-compiler/src/metadata/manager.ts @@ -5,41 +5,41 @@ import type { MetadataSchema, PathConfig, TranslationEntry } from "../types"; import { getLingoDir } from "../utils/path-helpers"; import { logger } from "../utils/logger"; -// Special key for storing stats const STATS_KEY = "__stats__"; - -// Metadata directory names for each environment const METADATA_DIR_DEV = "metadata-dev"; const METADATA_DIR_BUILD = "metadata-build"; /** - * Opens an LMDB database connection at the given path. + * Opens a short-lived LMDB connection. + * + * Short-lived over singleton: bundlers (Webpack/Next.js) spawn isolated workers + * that can't share a single connection. LMDB's MVCC handles concurrent access. */ -function openDatabase(dbPath: string): RootDatabase { +function openDatabaseConnection(dbPath: string): RootDatabase { fs.mkdirSync(dbPath, { recursive: true }); - // Build mode: disable fsync - metadata is deleted immediately after build, so durability is not needed. - // Dev mode: keep sync enabled for consistency during long-running sessions. 
const isBuildMode = dbPath.endsWith(METADATA_DIR_BUILD); try { return open({ path: dbPath, compression: true, + // Build: skip fsync (data is ephemeral). Dev: sync for durability. noSync: isBuildMode, }); } catch (error) { const message = error instanceof Error ? error.message : String(error); - throw new Error( - `Failed to open LMDB metadata database at ${dbPath}. Error: ${message}`, - ); + throw new Error(`Failed to open LMDB at ${dbPath}: ${message}`); } } /** - * Safely close database connection. + * Releases file handles to allow directory cleanup (avoids EBUSY/EPERM on Windows). */ -async function closeDatabase(db: RootDatabase, dbPath: string): Promise { +async function closeDatabaseConnection( + db: RootDatabase, + dbPath: string, +): Promise { try { await db.close(); } catch (e) { @@ -47,10 +47,6 @@ async function closeDatabase(db: RootDatabase, dbPath: string): Promise { } } -/** - * Read all entries from an open database. - * Internal helper - does not manage connection lifecycle. - */ function readEntriesFromDb(db: RootDatabase): MetadataSchema { const entries: Record = {}; @@ -84,62 +80,46 @@ export function createEmptyMetadata(): MetadataSchema { }; } -/** - * Load metadata from LMDB database. - */ export async function loadMetadata(dbPath: string): Promise { - const db = openDatabase(dbPath); + const db = openDatabaseConnection(dbPath); try { return readEntriesFromDb(db); } finally { - await closeDatabase(db, dbPath); + await closeDatabaseConnection(db, dbPath); } } /** - * Save translation entries to the metadata database. + * Persists translation entries to LMDB. * - * LMDB handles concurrency via MVCC, so multiple processes can write safely. - * - * @param dbPath - Path to the LMDB database directory - * @param entries - Translation entries to add/update - * @returns The updated metadata schema + * Uses transactionSync to batch all writes into a single commit. + * Async transactions are slow in Vite (~80-100ms) due to setImmediate scheduling. */ export async function saveMetadata( dbPath: string, entries: TranslationEntry[], -): Promise { - const db = openDatabase(dbPath); +): Promise { + const db = openDatabaseConnection(dbPath); + try { - await db.transaction(() => { + db.transactionSync(() => { for (const entry of entries) { - db.put(entry.hash, entry); + db.putSync(entry.hash, entry); } - // Count entries explicitly (excluding stats key) for clarity - let entryCount = 0; - for (const { key } of db.getRange()) { - if (key !== STATS_KEY) { - entryCount++; - } - } - - const stats = { + const totalKeys = db.getKeysCount(); + const entryCount = + db.get(STATS_KEY) !== undefined ? totalKeys - 1 : totalKeys; + db.putSync(STATS_KEY, { totalEntries: entryCount, lastUpdated: new Date().toISOString(), - }; - db.put(STATS_KEY, stats); + }); }); - - return readEntriesFromDb(db); } finally { - await closeDatabase(db, dbPath); + await closeDatabaseConnection(db, dbPath); } } -/** - * Clean up the metadata database directory. - */ export function cleanupExistingMetadata(metadataDbPath: string): void { logger.debug(`Cleaning up metadata database: ${metadataDbPath}`); @@ -164,12 +144,6 @@ export function cleanupExistingMetadata(metadataDbPath: string): void { } } -/** - * Get the absolute path to the metadata database directory - * - * @param config - Config with sourceRoot, lingoDir, and environment - * @returns Absolute path to metadata database directory - */ export function getMetadataPath(config: PathConfig): string { const dirname = config.environment === "development"