diff --git a/src/lib/build_cache.ts b/src/lib/build_cache.ts index d5bc1c831d..7e8d33a83e 100644 --- a/src/lib/build_cache.ts +++ b/src/lib/build_cache.ts @@ -5,8 +5,11 @@ import {z} from 'zod'; import type {Logger} from '@fuzdev/fuz_util/log.js'; import {git_current_commit_hash} from '@fuzdev/fuz_util/git.js'; import {fs_exists} from '@fuzdev/fuz_util/fs.js'; -import {map_concurrent} from '@fuzdev/fuz_util/async.js'; -import {hash_secure} from '@fuzdev/fuz_util/hash.js'; +import { + collect_file_snapshot, + validate_file_snapshot, + type FileSnapshotEntry, +} from '@fuzdev/fuz_util/file_snapshot.js'; import type {GroConfig} from './gro_config.ts'; import {paths} from './paths.ts'; @@ -148,42 +151,14 @@ export const save_build_cache_metadata = async ( * Validates that a cached build is still valid by checking stats and hashing outputs. * Uses size as a fast negative check before expensive hashing. * This is comprehensive validation to catch manual tampering or corruption. + * + * Delegates to `validate_file_snapshot` from `@fuzdev/fuz_util`. 
*/ export const validate_build_cache = async (metadata: BuildCacheMetadata): Promise => { - // Verify all tracked output files exist and have matching size - // Sequential checks with early return for performance - for (const output of metadata.outputs) { - // eslint-disable-next-line no-await-in-loop - if (!(await fs_exists(output.path))) { - return false; - } - - // Fast negative check: size mismatch = definitely invalid - // This avoids expensive file reads and hashing for files that have clearly changed - // eslint-disable-next-line no-await-in-loop - const stats = await stat(output.path); - if (stats.size !== output.size) { - return false; - } - } - - // Size matches for all files - now verify content with cryptographic hashing - // Hash files with controlled concurrency (could be 10k+ files) - const results = await map_concurrent( - metadata.outputs, - async (output) => { - try { - const contents = await readFile(output.path); - const actual_hash = await hash_secure(contents); - return actual_hash === output.hash; - } catch { - // File deleted/inaccessible between checks = cache invalid - return false; - } - }, - 20, - ); - return results.every((valid) => valid); + return validate_file_snapshot({ + entries: metadata.outputs, + concurrency: 20, + }); }; /** @@ -231,83 +206,36 @@ export const is_build_cache_valid = async ( return true; }; +/** + * Maps a `FileSnapshotEntry` (with all fields enabled) to a `BuildOutputEntry`. + */ +const to_build_output_entry = (entry: FileSnapshotEntry): BuildOutputEntry => ({ + path: entry.path, + hash: entry.hash!, + size: entry.size!, + mtime: entry.mtime!, + ctime: entry.ctime!, + mode: entry.mode!, +}); + /** * Collects information about all files in build output directories. * Returns an array of entries with path, hash, size, mtime, ctime, and mode. * - * Files are hashed in parallel for performance. For very large builds (10k+ files), - * this may take several seconds but ensures complete cache validation. 
+ * Delegates to `collect_file_snapshot` from `@fuzdev/fuz_util`. * * @param build_dirs Array of output directories to scan (e.g., ['build', 'dist', 'dist_server']) */ export const collect_build_outputs = async ( build_dirs: Array, ): Promise> => { - // Collect all files to hash first - interface FileEntry { - full_path: string; - cache_key: string; - } - - const files_hash_secure: Array = []; - - // Recursively collect files - const collect_files = async ( - dir: string, - relative_base: string, - dir_prefix: string, - ): Promise => { - const entries = await readdir(dir, {withFileTypes: true}); - - for (const entry of entries) { - // Skip metadata file itself - if (entry.name === BUILD_CACHE_METADATA_FILENAME) { - continue; - } - - const full_path = join(dir, entry.name); - const relative_path = relative_base ? join(relative_base, entry.name) : entry.name; - const cache_key = join(dir_prefix, relative_path); - - if (entry.isDirectory()) { - // eslint-disable-next-line no-await-in-loop - await collect_files(full_path, relative_path, dir_prefix); - } else if (entry.isFile()) { - files_hash_secure.push({full_path, cache_key}); - } - // Symlinks are intentionally ignored - we only hash regular files - } - }; - - // Collect files from all build directories sequentially - for (const build_dir of build_dirs) { - // eslint-disable-next-line no-await-in-loop - if (!(await fs_exists(build_dir))) { - continue; // Skip non-existent directories - } - // eslint-disable-next-line no-await-in-loop - await collect_files(build_dir, '', build_dir); - } - - // Hash files with controlled concurrency and collect stats (could be 10k+ files) - return map_concurrent( - files_hash_secure, - async ({full_path, cache_key}): Promise => { - const stats = await stat(full_path); - const contents = await readFile(full_path); - const hash = await hash_secure(contents); - - return { - path: cache_key, - hash, - size: stats.size, - mtime: stats.mtimeMs, - ctime: stats.ctimeMs, - mode: stats.mode, - 
}; - }, - 20, - ); + const entries = await collect_file_snapshot({ + dirs: build_dirs, + fields: {hash: true, size: true, mtime: true, ctime: true, mode: true}, + filter: (path) => !path.endsWith(BUILD_CACHE_METADATA_FILENAME), + concurrency: 20, + }); + return entries.map(to_build_output_entry); }; /** diff --git a/src/routes/library.json b/src/routes/library.json index 023be06c50..c1e7cf80ea 100644 --- a/src/routes/library.json +++ b/src/routes/library.json @@ -281,34 +281,34 @@ { "name": "BUILD_CACHE_METADATA_FILENAME", "kind": "variable", - "source_line": 15, + "source_line": 18, "type_signature": "\"build.json\"" }, { "name": "BUILD_CACHE_VERSION", "kind": "variable", - "source_line": 16, + "source_line": 19, "type_signature": "\"1\"" }, { "name": "BuildOutputEntry", "kind": "type", "doc_comment": "Metadata about a single build output file.\nIncludes cryptographic hash for validation plus filesystem stats for debugging and optimization.", - "source_line": 22, + "source_line": 25, "type_signature": "ZodObject<{ path: ZodString; hash: ZodString; size: ZodNumber; mtime: ZodNumber; ctime: ZodNumber; mode: ZodNumber; }, $strict>" }, { "name": "BuildCacheMetadata", "kind": "type", "doc_comment": "Metadata stored in .gro/ directory to track build cache validity.\nSchema validates structure at load time to catch corrupted cache files.", - "source_line": 40, + "source_line": 43, "type_signature": "ZodObject<{ version: ZodString; git_commit: ZodNullable; build_cache_config_hash: ZodString; timestamp: ZodString; outputs: ZodArray<...>; }, $strict>" }, { "name": "compute_build_cache_key", "kind": "function", "doc_comment": "Computes the cache key components for a build.\nThis determines whether a cached build can be reused.", - "source_line": 61, + "source_line": 64, "type_signature": "(config: GroConfig, log: Logger, git_commit?: string | null | undefined): Promise<{ git_commit: string | null; build_cache_config_hash: string; }>", "return_type": "Promise<{ git_commit: 
string | null; build_cache_config_hash: string; }>", "parameters": [ @@ -334,7 +334,7 @@ "name": "load_build_cache_metadata", "kind": "function", "doc_comment": "Loads build cache metadata from .gro/ directory.\nInvalid or corrupted cache files are automatically deleted.", - "source_line": 86, + "source_line": 89, "type_signature": "(): Promise<{ version: string; git_commit: string | null; build_cache_config_hash: string; timestamp: string; outputs: { path: string; hash: string; size: number; mtime: number; ctime: number; mode: number; }[]; } | null>", "return_type": "Promise<{ version: string; git_commit: string | null; build_cache_config_hash: string; timestamp: string; outputs: { path: string; hash: string; size: number; mtime: number; ctime: number; mode: number; }[]; } | null>", "parameters": [] @@ -343,7 +343,7 @@ "name": "save_build_cache_metadata", "kind": "function", "doc_comment": "Saves build cache metadata to .gro/ directory.\nErrors are logged but don't fail the build (cache is optional).", - "source_line": 128, + "source_line": 131, "type_signature": "(metadata: { version: string; git_commit: string | null; build_cache_config_hash: string; timestamp: string; outputs: { path: string; hash: string; size: number; mtime: number; ctime: number; mode: number; }[]; }, log?: Logger | undefined): Promise<...>", "return_type": "Promise", "parameters": [ @@ -361,8 +361,8 @@ { "name": "validate_build_cache", "kind": "function", - "doc_comment": "Validates that a cached build is still valid by checking stats and hashing outputs.\nUses size as a fast negative check before expensive hashing.\nThis is comprehensive validation to catch manual tampering or corruption.", - "source_line": 152, + "doc_comment": "Validates that a cached build is still valid by checking stats and hashing outputs.\nUses size as a fast negative check before expensive hashing.\nThis is comprehensive validation to catch manual tampering or corruption.\n\nDelegates to `validate_file_snapshot` 
from `@fuzdev/fuz_util`.", + "source_line": 157, "type_signature": "(metadata: { version: string; git_commit: string | null; build_cache_config_hash: string; timestamp: string; outputs: { path: string; hash: string; size: number; mtime: number; ctime: number; mode: number; }[]; }): Promise<...>", "return_type": "Promise", "parameters": [ @@ -376,7 +376,7 @@ "name": "is_build_cache_valid", "kind": "function", "doc_comment": "Main function to check if the build cache is valid.\nReturns true if the cached build can be used, false if a fresh build is needed.", - "source_line": 197, + "source_line": 172, "type_signature": "(config: GroConfig, log: Logger, git_commit?: string | null | undefined): Promise", "return_type": "Promise", "parameters": [ @@ -401,8 +401,8 @@ { "name": "collect_build_outputs", "kind": "function", - "doc_comment": "Collects information about all files in build output directories.\nReturns an array of entries with path, hash, size, mtime, ctime, and mode.\n\nFiles are hashed in parallel for performance. 
For very large builds (10k+ files),\nthis may take several seconds but ensures complete cache validation.", - "source_line": 243, + "doc_comment": "Collects information about all files in build output directories.\nReturns an array of entries with path, hash, size, mtime, ctime, and mode.\n\nDelegates to `collect_file_snapshot` from `@fuzdev/fuz_util`.", + "source_line": 229, "type_signature": "(build_dirs: string[]): Promise<{ path: string; hash: string; size: number; mtime: number; ctime: number; mode: number; }[]>", "return_type": "Promise<{ path: string; hash: string; size: number; mtime: number; ctime: number; mode: number; }[]>", "parameters": [ @@ -417,7 +417,7 @@ "name": "discover_build_output_dirs", "kind": "function", "doc_comment": "Discovers all build output directories in the current working directory.\nReturns an array of directory names that exist: build/, dist/, dist_*", - "source_line": 317, + "source_line": 245, "type_signature": "(): Promise", "return_type": "Promise", "parameters": [] @@ -426,7 +426,7 @@ "name": "create_build_cache_metadata", "kind": "function", "doc_comment": "Creates build cache metadata after a successful build.\nAutomatically discovers all build output directories (build/, dist/, dist_*).", - "source_line": 362, + "source_line": 290, "type_signature": "(config: GroConfig, log: Logger, git_commit?: string | null | undefined, build_dirs?: string[] | undefined): Promise<{ version: string; git_commit: string | null; build_cache_config_hash: string; timestamp: string; outputs: { ...; }[]; }>", "return_type": "Promise<{ version: string; git_commit: string | null; build_cache_config_hash: string; timestamp: string; outputs: { path: string; hash: string; size: number; mtime: number; ctime: number; mode: number; }[]; }>", "parameters": [ diff --git a/src/test/build_cache.creation.test.ts b/src/test/build_cache.creation.test.ts index 09580265ab..ddc3decfb2 100644 --- a/src/test/build_cache.creation.test.ts +++ 
b/src/test/build_cache.creation.test.ts @@ -1,14 +1,12 @@ import {describe, test, expect, vi, beforeEach} from 'vitest'; +import type {FileSnapshotEntry} from '@fuzdev/fuz_util/file_snapshot.js'; import {create_build_cache_metadata} from '../lib/build_cache.ts'; import { create_mock_logger, create_mock_config, - mock_file_stats, mock_dir_stats, - mock_file_entry, - mock_dir_entry, } from './build_cache_test_helpers.ts'; // Mock dependencies @@ -16,20 +14,21 @@ vi.mock('@fuzdev/fuz_util/git.js', () => ({ git_current_commit_hash: vi.fn(), })); -// Mock async fs functions for discover_build_output_dirs and collect_build_outputs +// Mock async fs functions for discover_build_output_dirs (still uses node:fs/promises directly) vi.mock('node:fs/promises', () => ({ readdir: vi.fn(), stat: vi.fn(), - readFile: vi.fn(), })); -// Mock fs_exists from fuz_util +// Mock fs_exists from fuz_util (used by discover_build_output_dirs) vi.mock('@fuzdev/fuz_util/fs.js', () => ({ fs_exists: vi.fn(), })); -vi.mock('@fuzdev/fuz_util/hash.js', () => ({ - hash_secure: vi.fn(), +// Mock file_snapshot from fuz_util (used by collect_build_outputs) +vi.mock('@fuzdev/fuz_util/file_snapshot.js', () => ({ + collect_file_snapshot: vi.fn(), + validate_file_snapshot: vi.fn(), })); describe('create_build_cache_metadata', () => { @@ -45,15 +44,10 @@ describe('create_build_cache_metadata', () => { test('creates complete metadata object', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue('abc123'); - vi.mocked(fs_exists).mockResolvedValue(false); - vi.mocked(readdir).mockResolvedValue([] as any); - 
vi.mocked(stat).mockResolvedValue(mock_dir_stats()); - vi.mocked(hash_secure).mockResolvedValue('hash123'); + vi.mocked(collect_file_snapshot).mockResolvedValue([]); const config = await create_mock_config(); const log = create_mock_logger(); @@ -71,31 +65,36 @@ describe('create_build_cache_metadata', () => { test('creates metadata with actual build outputs', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat, readFile} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {readdir} = vi.mocked(await import('node:fs/promises')); + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue('abc123'); vi.mocked(fs_exists).mockImplementation((path: any) => Promise.resolve(path === 'build')); - // Set up async mocks for discover_build_output_dirs and collect_build_outputs vi.mocked(readdir).mockImplementation((path: any) => { if (path === '.') return Promise.resolve([] as any); - if (path === 'build') { - return Promise.resolve([ - mock_file_entry('index.html'), - mock_file_entry('bundle.js'), - ] as any); - } return Promise.resolve([] as any); }); - vi.mocked(stat).mockImplementation((path: any) => { - if (String(path) === 'build') return Promise.resolve(mock_dir_stats()); - return Promise.resolve(mock_file_stats()); - }); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - let hash_count = 0; - // eslint-disable-next-line @typescript-eslint/require-await - vi.mocked(hash_secure).mockImplementation(async () => `hash${++hash_count}`); + // Mock collect_file_snapshot to return build outputs + const mock_entries: Array = [ + { + path: 'build/index.html', + hash: 'hash1', + size: 1024, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + { + path: 
'build/bundle.js', + hash: 'hash2', + size: 1024, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + ]; + vi.mocked(collect_file_snapshot).mockResolvedValue(mock_entries); const config = await create_mock_config(); const log = create_mock_logger(); @@ -105,12 +104,12 @@ describe('create_build_cache_metadata', () => { expect(result.outputs).toHaveLength(2); expect(result.outputs[0]).toMatchObject({ path: 'build/index.html', - hash: 'hash2', // hash1 is for config hash + hash: 'hash1', size: 1024, }); expect(result.outputs[1]).toMatchObject({ path: 'build/bundle.js', - hash: 'hash3', + hash: 'hash2', size: 1024, }); }); @@ -118,35 +117,49 @@ describe('create_build_cache_metadata', () => { test('creates metadata with multiple build directories', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat, readFile} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {readdir, stat} = vi.mocked(await import('node:fs/promises')); + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue('abc123'); vi.mocked(fs_exists).mockResolvedValue(true); - // Set up async mocks for discover_build_output_dirs and collect_build_outputs + // Set up async mocks for discover_build_output_dirs vi.mocked(readdir).mockImplementation((path: any) => { if (path === '.') { return Promise.resolve(['dist_server', 'src', 'node_modules'] as any); } - if (path === 'build') { - return Promise.resolve([mock_file_entry('app.js')] as any); - } - if (path === 'dist') { - return Promise.resolve([mock_file_entry('lib.js')] as any); - } - if (path === 'dist_server') { - return Promise.resolve([mock_file_entry('server.js')] as any); - } return Promise.resolve([] as any); }); - 
vi.mocked(stat).mockResolvedValue(mock_dir_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - let hash_count = 0; - // eslint-disable-next-line @typescript-eslint/require-await - vi.mocked(hash_secure).mockImplementation(async () => `hash${++hash_count}`); + // Mock collect_file_snapshot to return files from all dirs + const mock_entries: Array = [ + { + path: 'build/app.js', + hash: 'hash1', + size: 1024, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + { + path: 'dist/lib.js', + hash: 'hash2', + size: 1024, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + { + path: 'dist_server/server.js', + hash: 'hash3', + size: 1024, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + ]; + vi.mocked(collect_file_snapshot).mockResolvedValue(mock_entries); const config = await create_mock_config(); const log = create_mock_logger(); @@ -163,19 +176,16 @@ describe('create_build_cache_metadata', () => { test('handles empty build directories', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {readdir} = vi.mocked(await import('node:fs/promises')); + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue('abc123'); vi.mocked(fs_exists).mockImplementation((path: any) => Promise.resolve(path === 'build')); - // Set up async mocks for discover_build_output_dirs and collect_build_outputs vi.mocked(readdir).mockImplementation((path: any) => { if (path === '.') return Promise.resolve([] as any); - if (path === 'build') return Promise.resolve([] as any); return Promise.resolve([] as any); }); - vi.mocked(stat).mockResolvedValue({isDirectory: () => true} as any); - 
vi.mocked(hash_secure).mockResolvedValue('hash123'); + vi.mocked(collect_file_snapshot).mockResolvedValue([]); const config = await create_mock_config(); const log = create_mock_logger(); @@ -190,41 +200,27 @@ describe('create_build_cache_metadata', () => { test('creates metadata with deeply nested file structures', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat, readFile} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {readdir} = vi.mocked(await import('node:fs/promises')); + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue('abc123'); vi.mocked(fs_exists).mockImplementation((path: any) => Promise.resolve(path === 'build')); - // Set up async mocks for discover_build_output_dirs and collect_build_outputs vi.mocked(readdir).mockImplementation((path: any) => { - const path_str = String(path); - if (path_str === '.') return Promise.resolve([] as any); // no dist_* directories - if (path_str === 'build') { - return Promise.resolve([mock_dir_entry('assets')] as any); - } - if (path_str === 'build/assets') { - return Promise.resolve([mock_dir_entry('js')] as any); - } - if (path_str === 'build/assets/js') { - return Promise.resolve([mock_dir_entry('lib')] as any); - } - if (path_str === 'build/assets/js/lib') { - return Promise.resolve([mock_dir_entry('utils')] as any); - } - if (path_str === 'build/assets/js/lib/utils') { - return Promise.resolve([mock_file_entry('helper.js')] as any); - } + if (path === '.') return Promise.resolve([] as any); return Promise.resolve([] as any); }); - vi.mocked(stat).mockImplementation((path: any) => { - if (String(path).endsWith('.js')) { - return Promise.resolve(mock_file_stats(256)); - } - return Promise.resolve(mock_dir_stats()); - 
}); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - vi.mocked(hash_secure).mockResolvedValue('deep_hash'); + // Mock collect_file_snapshot to return deeply nested file + vi.mocked(collect_file_snapshot).mockResolvedValue([ + { + path: 'build/assets/js/lib/utils/helper.js', + hash: 'deep_hash', + size: 256, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + ]); const config = await create_mock_config(); const log = create_mock_logger(); @@ -240,33 +236,33 @@ describe('create_build_cache_metadata', () => { test('handles build directories with many files', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat, readFile} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {readdir} = vi.mocked(await import('node:fs/promises')); + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue('abc123'); vi.mocked(fs_exists).mockImplementation((path: any) => Promise.resolve(path === 'build')); - - const files = Array.from({length: 15}, (_, i) => mock_file_entry(`file${i}.js`)); - // Set up async mocks for discover_build_output_dirs and collect_build_outputs vi.mocked(readdir).mockImplementation((path: any) => { - if (path === '.') return Promise.resolve([] as any); // no dist_* directories - if (path === 'build') return Promise.resolve(files as any); + if (path === '.') return Promise.resolve([] as any); return Promise.resolve([] as any); }); - vi.mocked(stat).mockResolvedValue(mock_file_stats(2048)); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - - let hash_count = 0; - // eslint-disable-next-line @typescript-eslint/require-await - vi.mocked(hash_secure).mockImplementation(async () => `hash${++hash_count}`); + // Mock 
collect_file_snapshot to return many files + const mock_entries: Array = Array.from({length: 15}, (_, i) => ({ + path: `build/file${i}.js`, + hash: `hash${i}`, + size: 2048, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + })); + vi.mocked(collect_file_snapshot).mockResolvedValue(mock_entries); const config = await create_mock_config(); const log = create_mock_logger(); const result = await create_build_cache_metadata(config, log); - // Should hash all 15 files in parallel + // Should have all 15 files expect(result.outputs).toHaveLength(15); // Verify all files are present for (let i = 0; i < 15; i++) { @@ -276,15 +272,10 @@ describe('create_build_cache_metadata', () => { test('creates metadata with null git commit', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue(null); - vi.mocked(fs_exists).mockResolvedValue(false); - vi.mocked(readdir).mockResolvedValue([] as any); - vi.mocked(stat).mockResolvedValue(mock_dir_stats()); - vi.mocked(hash_secure).mockResolvedValue('hash123'); + vi.mocked(collect_file_snapshot).mockResolvedValue([]); const config = await create_mock_config(); const log = create_mock_logger(); @@ -300,15 +291,10 @@ describe('create_build_cache_metadata', () => { test('includes correct build_cache_config_hash', async () => { const {git_current_commit_hash} = await import('@fuzdev/fuz_util/git.js'); - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + const {collect_file_snapshot} = 
vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); vi.mocked(git_current_commit_hash).mockResolvedValue('abc123'); - vi.mocked(fs_exists).mockResolvedValue(false); - vi.mocked(readdir).mockResolvedValue([] as any); - vi.mocked(stat).mockResolvedValue(mock_dir_stats()); - vi.mocked(hash_secure).mockResolvedValue('custom_config_hash'); + vi.mocked(collect_file_snapshot).mockResolvedValue([]); const config = await create_mock_config({ build_cache_config: { @@ -320,8 +306,9 @@ describe('create_build_cache_metadata', () => { const result = await create_build_cache_metadata(config, log); - // Should include the hashed config - expect(result.build_cache_config_hash).toBe('custom_config_hash'); + // Should include a non-empty hashed config + expect(result.build_cache_config_hash).toBeTruthy(); + expect(result.build_cache_config_hash.length).toBeGreaterThan(0); expect(result.git_commit).toBe('abc123'); }); }); diff --git a/src/test/build_cache.discovery.test.ts b/src/test/build_cache.discovery.test.ts index f6c26abd3a..e3991a0eba 100644 --- a/src/test/build_cache.discovery.test.ts +++ b/src/test/build_cache.discovery.test.ts @@ -1,23 +1,23 @@ import {describe, test, expect, vi, beforeEach} from 'vitest'; +import type {FileSnapshotEntry} from '@fuzdev/fuz_util/file_snapshot.js'; import {discover_build_output_dirs, collect_build_outputs} from '../lib/build_cache.ts'; -import {mock_file_stats, mock_file_entry, mock_dir_entry} from './build_cache_test_helpers.ts'; - -// Mock dependencies - discover_build_output_dirs and collect_build_outputs now use async fs functions +// Mock dependencies - discover_build_output_dirs still uses node:fs/promises directly vi.mock('node:fs/promises', () => ({ readdir: vi.fn(), stat: vi.fn(), - readFile: vi.fn(), })); -// Mock fs_exists from fuz_util +// Mock fs_exists from fuz_util (used by discover_build_output_dirs) vi.mock('@fuzdev/fuz_util/fs.js', () => ({ fs_exists: vi.fn(), })); -vi.mock('@fuzdev/fuz_util/hash.js', () => ({ - 
hash_secure: vi.fn(), +// Mock file_snapshot from fuz_util (used by collect_build_outputs) +vi.mock('@fuzdev/fuz_util/file_snapshot.js', () => ({ + collect_file_snapshot: vi.fn(), + validate_file_snapshot: vi.fn(), })); describe('discover_build_output_dirs', () => { @@ -160,22 +160,28 @@ describe('collect_build_outputs', () => { vi.clearAllMocks(); }); - test('hashes all files in build directory', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, readFile, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); - - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(readdir).mockResolvedValue([ - mock_file_entry('index.html'), - mock_file_entry('bundle.js'), - ] as any); - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - - let hash_count = 0; - // eslint-disable-next-line @typescript-eslint/require-await - vi.mocked(hash_secure).mockImplementation(async () => `hash${++hash_count}`); + test('collects files from build directory via collect_file_snapshot', async () => { + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); + + const mock_entries: Array = [ + { + path: 'build/index.html', + hash: 'hash1', + size: 1024, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + { + path: 'build/bundle.js', + hash: 'hash2', + size: 2048, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + ]; + vi.mocked(collect_file_snapshot).mockResolvedValue(mock_entries); const result = await collect_build_outputs(['build']); @@ -191,144 +197,102 @@ describe('collect_build_outputs', () => { expect(result[1]).toEqual({ path: 'build/bundle.js', hash: 'hash2', - size: 1024, + size: 2048, mtime: 1729512000000, ctime: 1729512000000, mode: 33188, }); }); - test('skips build.json file', async () => { - const {fs_exists} = 
vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, readFile, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + test('passes correct options including build.json filter', async () => { + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(readdir).mockResolvedValue([ - mock_file_entry('build.json'), - mock_file_entry('index.html'), - ] as any); - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - vi.mocked(hash_secure).mockResolvedValue('hash'); + vi.mocked(collect_file_snapshot).mockResolvedValue([]); - const result = await collect_build_outputs(['build']); + await collect_build_outputs(['build']); - expect(result).toHaveLength(1); - expect(result.find((o) => o.path === 'build/build.json')).toBeUndefined(); - expect(result.find((o) => o.path === 'build/index.html')).toBeDefined(); - }); - - test('returns empty array for non-existent directory', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - - vi.mocked(fs_exists).mockResolvedValue(false); - - const result = await collect_build_outputs(['build']); + expect(collect_file_snapshot).toHaveBeenCalledWith({ + dirs: ['build'], + fields: {hash: true, size: true, mtime: true, ctime: true, mode: true}, + filter: expect.any(Function), + concurrency: 20, + }); - expect(result).toEqual([]); + // Verify the filter excludes build.json + const call_args = vi.mocked(collect_file_snapshot).mock.calls[0]![0]; + expect(call_args.filter!('build/build.json')).toBe(false); + expect(call_args.filter!('build/index.html')).toBe(true); }); - test('hashes all files in directory', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, readFile, stat} = vi.mocked(await import('node:fs/promises')); - 
const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + test('returns empty array when collect_file_snapshot returns empty', async () => { + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(readdir).mockResolvedValue([ - mock_file_entry('file1.js'), - mock_file_entry('file2.js'), - mock_file_entry('file3.js'), - ] as any); - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - vi.mocked(hash_secure).mockResolvedValue('hash'); + vi.mocked(collect_file_snapshot).mockResolvedValue([]); const result = await collect_build_outputs(['build']); - // Should hash all 3 files - expect(result).toHaveLength(3); - expect(result.find((o) => o.path === 'build/file1.js')).toBeDefined(); - expect(result.find((o) => o.path === 'build/file2.js')).toBeDefined(); - expect(result.find((o) => o.path === 'build/file3.js')).toBeDefined(); + expect(result).toEqual([]); }); - test('hashes files from multiple directories', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, readFile, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); - - vi.mocked(fs_exists).mockResolvedValue(true); - - vi.mocked(readdir).mockImplementation((path: any) => { - if (path === 'build') { - return Promise.resolve([mock_file_entry('index.html')] as any); - } - if (path === 'dist') { - return Promise.resolve([mock_file_entry('index.js')] as any); - } - if (path === 'dist_server') { - return Promise.resolve([mock_file_entry('server.js')] as any); - } - return Promise.resolve([] as any); - }); - - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - - let hash_count = 0; - // eslint-disable-next-line @typescript-eslint/require-await - 
vi.mocked(hash_secure).mockImplementation(async () => `hash${++hash_count}`); + test('collects files from multiple directories', async () => { + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); + + const mock_entries: Array<FileSnapshotEntry> = [ + { + path: 'build/index.html', + hash: 'hash1', + size: 1024, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + { + path: 'dist/index.js', + hash: 'hash2', + size: 2048, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + { + path: 'dist_server/server.js', + hash: 'hash3', + size: 512, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + ]; + vi.mocked(collect_file_snapshot).mockResolvedValue(mock_entries); const result = await collect_build_outputs(['build', 'dist', 'dist_server']); - // Should hash files from all three directories expect(result).toHaveLength(3); - expect(result.find((o) => o.path === 'build/index.html')).toBeDefined(); - expect(result.find((o) => o.path === 'dist/index.js')).toBeDefined(); - expect(result.find((o) => o.path === 'dist_server/server.js')).toBeDefined(); - // Each file should have a unique hash - expect(result.find((o) => o.path === 'build/index.html')?.hash).toBe('hash1'); - expect(result.find((o) => o.path === 'dist/index.js')?.hash).toBe('hash2'); - expect(result.find((o) => o.path === 'dist_server/server.js')?.hash).toBe('hash3'); + expect(collect_file_snapshot).toHaveBeenCalledWith( + expect.objectContaining({dirs: ['build', 'dist', 'dist_server']}), + ); }); - test('hashes files in deeply nested directories', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readdir, readFile, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); - - vi.mocked(fs_exists).mockResolvedValue(true); - - vi.mocked(readdir).mockImplementation((path: any) => { - const path_str = String(path); - if (path_str === 
'build') { - return Promise.resolve([mock_dir_entry('assets')] as any); - } - if (path_str === 'build/assets') { - return Promise.resolve([mock_dir_entry('js')] as any); - } - if (path_str === 'build/assets/js') { - return Promise.resolve([mock_dir_entry('vendor')] as any); - } - if (path_str === 'build/assets/js/vendor') { - return Promise.resolve([mock_dir_entry('libs')] as any); - } - if (path_str === 'build/assets/js/vendor/libs') { - return Promise.resolve([mock_file_entry('foo.js')] as any); - } - return Promise.resolve([] as any); - }); - - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - vi.mocked(hash_secure).mockResolvedValue('deep_hash'); + test('maps FileSnapshotEntry fields to BuildOutputEntry', async () => { + const {collect_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); + + const mock_entries: Array<FileSnapshotEntry> = [ + { + path: 'build/assets/js/vendor/libs/foo.js', + hash: 'deep_hash', + size: 256, + mtime: 1729512000000, + ctime: 1729512000000, + mode: 33188, + }, + ]; + vi.mocked(collect_file_snapshot).mockResolvedValue(mock_entries); const result = await collect_build_outputs(['build']); - // Should recursively hash deeply nested file const deep_file = result.find((o) => o.path === 'build/assets/js/vendor/libs/foo.js'); expect(deep_file).toBeDefined(); expect(deep_file?.hash).toBe('deep_hash'); + expect(deep_file?.size).toBe(256); }); }); diff --git a/src/test/build_cache.file_validation.test.ts b/src/test/build_cache.file_validation.test.ts index 5ec44c399b..1df49a27b0 100644 --- a/src/test/build_cache.file_validation.test.ts +++ b/src/test/build_cache.file_validation.test.ts @@ -5,25 +5,12 @@ import {validate_build_cache} from '../lib/build_cache.ts'; import { create_mock_build_cache_metadata, create_mock_output_entry, - mock_file_stats, } from './build_cache_test_helpers.ts'; -// Mock dependencies -vi.mock('node:fs/promises', () => ({ - readFile: vi.fn(), 
- writeFile: vi.fn(), - mkdir: vi.fn(), - rm: vi.fn(), - stat: vi.fn(), - readdir: vi.fn(), -})); - -vi.mock('@fuzdev/fuz_util/fs.js', () => ({ - fs_exists: vi.fn(), -})); - -vi.mock('@fuzdev/fuz_util/hash.js', () => ({ - hash_secure: vi.fn(), +// Mock file_snapshot from fuz_util (validate_build_cache now delegates to validate_file_snapshot) +vi.mock('@fuzdev/fuz_util/file_snapshot.js', () => ({ + collect_file_snapshot: vi.fn(), + validate_file_snapshot: vi.fn(), })); describe('validate_build_cache', () => { @@ -31,10 +18,8 @@ describe('validate_build_cache', () => { vi.clearAllMocks(); }); - test('returns true when all output files match hashes and sizes', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readFile, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + test('returns true when validate_file_snapshot returns true', async () => { + const {validate_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); const metadata = create_mock_build_cache_metadata({ outputs: [ @@ -43,100 +28,62 @@ describe('validate_build_cache', () => { ], }); - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(stat).mockImplementation((path: any) => { - if (String(path) === 'build/index.html') { - return Promise.resolve(mock_file_stats(1024)); - } - return Promise.resolve(mock_file_stats(2048)); - }); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - - let call_count = 0; - // eslint-disable-next-line @typescript-eslint/require-await - vi.mocked(hash_secure).mockImplementation(async () => { - call_count++; - return call_count === 1 ? 
'hash1' : 'hash2'; - }); + vi.mocked(validate_file_snapshot).mockResolvedValue(true); const result = await validate_build_cache(metadata); expect(result).toBe(true); }); - test('returns false when output file is missing', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); + test('returns false when validate_file_snapshot returns false', async () => { + const {validate_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); const metadata = create_mock_build_cache_metadata({ outputs: [create_mock_output_entry('build/index.html')], }); - vi.mocked(fs_exists).mockResolvedValue(false); + vi.mocked(validate_file_snapshot).mockResolvedValue(false); const result = await validate_build_cache(metadata); expect(result).toBe(false); }); - test('returns false when output file size differs (fast path)', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {stat} = vi.mocked(await import('node:fs/promises')); + test('passes metadata outputs and concurrency to validate_file_snapshot', async () => { + const {validate_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); - const metadata = create_mock_build_cache_metadata({ - outputs: [create_mock_output_entry('build/index.html', {hash: 'expected_hash', size: 1024})], - }); + const outputs = [ + create_mock_output_entry('build/index.html', {hash: 'hash1'}), + create_mock_output_entry('build/bundle.js', {hash: 'hash2', size: 2048}), + create_mock_output_entry('build/styles.css', {hash: 'hash3', size: 512}), + ]; - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(stat).mockResolvedValue(mock_file_stats(2048)); // Different size + const metadata = create_mock_build_cache_metadata({outputs}); - const result = await validate_build_cache(metadata); - - expect(result).toBe(false); - }); + vi.mocked(validate_file_snapshot).mockResolvedValue(true); - test('returns false when output file hash does not match', 
async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readFile, stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + await validate_build_cache(metadata); - const metadata = create_mock_build_cache_metadata({ - outputs: [create_mock_output_entry('build/index.html', {hash: 'expected_hash'})], + expect(validate_file_snapshot).toHaveBeenCalledWith({ + entries: outputs, + concurrency: 20, }); - - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - vi.mocked(hash_secure).mockResolvedValue('different_hash'); - - const result = await validate_build_cache(metadata); - - expect(result).toBe(false); }); - test('returns false when some files exist but others are missing', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); + test('returns false for empty outputs when validate_file_snapshot returns false', async () => { + const {validate_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); - const metadata = create_mock_build_cache_metadata({ - outputs: [ - create_mock_output_entry('build/index.html', {hash: 'hash1'}), - create_mock_output_entry('build/missing.js', {hash: 'hash2', size: 2048}), - create_mock_output_entry('build/another.css', {hash: 'hash3', size: 512}), - ], - }); + const metadata = create_mock_build_cache_metadata({outputs: []}); - vi.mocked(fs_exists).mockImplementation((path: any) => { - return Promise.resolve(String(path) === 'build/index.html'); - }); + vi.mocked(validate_file_snapshot).mockResolvedValue(false); const result = await validate_build_cache(metadata); expect(result).toBe(false); }); - test('returns false when parallel hash validation has mixed results', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readFile, 
stat} = vi.mocked(await import('node:fs/promises')); - const {hash_secure} = await import('@fuzdev/fuz_util/hash.js'); + test('delegates validation entirely to validate_file_snapshot', async () => { + const {validate_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); const metadata = create_mock_build_cache_metadata({ outputs: [ @@ -146,45 +93,31 @@ describe('validate_build_cache', () => { ], }); - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - vi.mocked(readFile).mockResolvedValue(Buffer.from('content')); - - let call_count = 0; - // eslint-disable-next-line @typescript-eslint/require-await - vi.mocked(hash_secure).mockImplementation(async () => { - call_count++; - return call_count <= 2 ? 'correct_hash' : 'wrong_hash'; - }); + // validate_file_snapshot handles all the internal logic (size check, hash check, etc.) + vi.mocked(validate_file_snapshot).mockResolvedValue(false); const result = await validate_build_cache(metadata); expect(result).toBe(false); + expect(validate_file_snapshot).toHaveBeenCalledTimes(1); }); - test('returns false when file is deleted between size check and hash validation', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readFile, stat} = vi.mocked(await import('node:fs/promises')); + test('returns true for single file when validate_file_snapshot succeeds', async () => { + const {validate_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); const metadata = create_mock_build_cache_metadata({ outputs: [create_mock_output_entry('build/index.html', {size: 1024})], }); - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(stat).mockResolvedValue(mock_file_stats()); - - vi.mocked(readFile).mockImplementation(() => { - return Promise.reject(new Error('ENOENT: no such file or directory')); - }); + vi.mocked(validate_file_snapshot).mockResolvedValue(true); const result = await 
validate_build_cache(metadata); - expect(result).toBe(false); + expect(result).toBe(true); }); - test('returns false when file becomes inaccessible during hash validation', async () => { - const {fs_exists} = vi.mocked(await import('@fuzdev/fuz_util/fs.js')); - const {readFile, stat} = vi.mocked(await import('node:fs/promises')); + test('returns false for multiple files when validate_file_snapshot fails', async () => { + const {validate_file_snapshot} = vi.mocked(await import('@fuzdev/fuz_util/file_snapshot.js')); const metadata = create_mock_build_cache_metadata({ outputs: [ @@ -193,20 +126,7 @@ describe('validate_build_cache', () => { ], }); - vi.mocked(fs_exists).mockResolvedValue(true); - vi.mocked(stat).mockImplementation((path: any) => { - if (String(path) === 'build/index.html') { - return Promise.resolve(mock_file_stats(1024)); - } - return Promise.resolve(mock_file_stats(2048)); - }); - - vi.mocked(readFile).mockImplementation((path: any) => { - if (String(path) === 'build/index.html') { - return Promise.resolve(Buffer.from('content')); - } - return Promise.reject(new Error('EACCES: permission denied')); - }); + vi.mocked(validate_file_snapshot).mockResolvedValue(false); const result = await validate_build_cache(metadata); diff --git a/src/test/build_task_test_helpers.ts b/src/test/build_task_test_helpers.ts index 43cf8ec36d..a87d48709e 100644 --- a/src/test/build_task_test_helpers.ts +++ b/src/test/build_task_test_helpers.ts @@ -3,6 +3,7 @@ import {vi} from 'vitest'; import type {TaskContext} from '../lib/task.ts'; import type {Args} from '../lib/build.task.ts'; import type {GroConfig} from '../lib/gro_config.ts'; + import {create_mock_task_context} from './test_helpers.ts'; /** diff --git a/src/test/deploy_task_test_helpers.ts b/src/test/deploy_task_test_helpers.ts index 6f03cfd4a9..acb58c915d 100644 --- a/src/test/deploy_task_test_helpers.ts +++ b/src/test/deploy_task_test_helpers.ts @@ -3,6 +3,7 @@ import {vi} from 'vitest'; import type {TaskContext} 
from '../lib/task.ts'; import type {Args} from '../lib/deploy.task.ts'; import type {GroConfig} from '../lib/gro_config.ts'; + import {create_mock_task_context} from './test_helpers.ts'; /* eslint-disable @typescript-eslint/explicit-module-boundary-types */ diff --git a/src/test/gen_helpers.test.ts b/src/test/gen_helpers.test.ts index a599a8bfde..eabe5b70eb 100644 --- a/src/test/gen_helpers.test.ts +++ b/src/test/gen_helpers.test.ts @@ -1,11 +1,11 @@ import {test, expect, vi, beforeEach} from 'vitest'; import {resolve} from 'node:path'; +import type {Logger} from '@fuzdev/fuz_util/log.js'; +import type {Timings} from '@fuzdev/fuz_util/timings.js'; import {should_trigger_gen} from '../lib/gen_helpers.ts'; import type {Filer} from '../lib/filer.ts'; import * as filer from '../lib/filer.ts'; -import type {Logger} from '@fuzdev/fuz_util/log.js'; -import type {Timings} from '@fuzdev/fuz_util/timings.js'; import type {GroConfig} from '../lib/gro_config.ts'; import type {InvokeTask} from '../lib/task.ts'; import type {Disknode} from '../lib/disknode.ts'; diff --git a/src/test/loader.test.ts b/src/test/loader.test.ts index 7d8a0371a1..c42f7fc6c4 100644 --- a/src/test/loader.test.ts +++ b/src/test/loader.test.ts @@ -2,6 +2,7 @@ import {test, expect} from 'vitest'; import {resolve} from 'node:path'; import {resolve_gro_module_path, spawn_with_loader} from '../lib/gro_helpers.ts'; + import {TEST_TIMEOUT_MD} from './test_helpers.ts'; test( diff --git a/src/test/parse_exports.test.ts b/src/test/parse_exports.test.ts index 28b4e50d4b..a54c5d9f2b 100644 --- a/src/test/parse_exports.test.ts +++ b/src/test/parse_exports.test.ts @@ -7,6 +7,7 @@ import { process_ts_exports, type ExportDeclaration, } from '../lib/parse_exports.ts'; + import {create_ts_test_env} from './test_helpers.ts'; const dir = resolve(dirname(fileURLToPath(import.meta.url)), '../lib'); diff --git a/src/test/run_gen.test.ts b/src/test/run_gen.test.ts index 3e25d1924c..8159e52995 100644 --- 
a/src/test/run_gen.test.ts +++ b/src/test/run_gen.test.ts @@ -2,6 +2,7 @@ import {test, expect} from 'vitest'; import {resolve} from 'node:path'; import {resolve_gro_module_path, spawn_with_loader} from '../lib/gro_helpers.js'; + import {TEST_TIMEOUT_MD} from './test_helpers.ts'; test( diff --git a/src/test/run_task.test.ts b/src/test/run_task.test.ts index fa2e4ca443..ff9d399b45 100644 --- a/src/test/run_task.test.ts +++ b/src/test/run_task.test.ts @@ -3,6 +3,7 @@ import {resolve} from 'node:path'; import {existsSync} from 'node:fs'; import {resolve_gro_module_path, spawn_with_loader} from '../lib/gro_helpers.ts'; + import {TEST_TIMEOUT_MD} from './test_helpers.ts'; test( diff --git a/src/test/sveltekit_shim_env.test.ts b/src/test/sveltekit_shim_env.test.ts index 5fb93dc86f..d836501bdb 100644 --- a/src/test/sveltekit_shim_env.test.ts +++ b/src/test/sveltekit_shim_env.test.ts @@ -2,6 +2,7 @@ import {describe, test, expect} from 'vitest'; import {resolve} from 'node:path'; import {resolve_gro_module_path, spawn_with_loader} from '../lib/gro_helpers.ts'; + import {TEST_TIMEOUT_MD} from './test_helpers.ts'; describe('sveltekit shim env', () => {