Merge branch 'fix/MULTIVERSION-0001-multi-version-indexing'

This commit is contained in:
Giancarmine Salucci
2026-03-29 12:44:47 +02:00
49 changed files with 3273 additions and 1049 deletions

View File

@@ -165,3 +165,26 @@ Add subsequent research below this section.
- Risks / follow-ups:
- Base-aware navigation fixes must preserve internal app routing semantics and should not replace intentional external navigation, because SvelteKit `goto(...)` no longer accepts external URLs.
- Settings and search page lifecycle changes must avoid reintroducing SSR-triggered fetches or self-triggered URL loops; client-only bootstrap logic should remain mounted once and URL-sync effects must stay idempotent.
### 2026-03-27 — ROUTING-0001 planning research
- Task: Plan the repository-detail routing fix for slash-bearing repository IDs causing homepage SSR failures and invalid `/repos/[id]` navigation.
- Files inspected:
- `package.json`
- `src/lib/components/RepositoryCard.svelte`
- `src/routes/+page.svelte`
- `src/routes/+page.server.ts`
- `src/routes/repos/[id]/+page.server.ts`
- `src/routes/repos/[id]/+page.svelte`
- `src/routes/api/v1/api-contract.integration.test.ts`
- `src/lib/types.ts`
- Findings:
- The app is on SvelteKit `^2.50.2` and uses `$app/paths.resolve(...)` for internal navigation, including `resolveRoute('/repos/[id]', { id: repo.id })` in `RepositoryCard.svelte`.
- SvelteKit's `[id]` route is a single-segment dynamic parameter. Context7 routing docs show slash-containing values belong to rest parameters like `[...param]`, so raw repository IDs containing `/` are invalid inputs for `resolveRoute('/repos/[id]', ...)`.
- The repository model intentionally stores slash-bearing IDs such as `/facebook/react`, and the existing API surface consistently treats those IDs as percent-encoded path segments. The integration contract already passes `params.id = encodeURIComponent('/facebook/react')` for `/api/v1/libs/[id]` handlers, which then call `decodeURIComponent(params.id)`.
- The homepage SSR failure is therefore rooted in UI link generation, not repository listing fetches: rendering `RepositoryCard.svelte` with a raw slash-bearing `repo.id` can throw before page load completes, which explains repeated `500` responses on `/`.
- The repo detail page currently forwards `params.id` directly into `encodeURIComponent(...)` for downstream API requests. Once detail links are generated as encoded single segments, the page loader and client-side refresh/delete/reindex flows need one normalization step so API calls continue targeting the stored repository ID instead of a doubly encoded value.
- No existing browser-facing test covers homepage card navigation or `/repos/[id]` loader behavior; the closest current evidence is the API contract test file, which already exercises encoded repository IDs on HTTP endpoints and provides reusable fixtures for slash-bearing IDs.
- Risks / follow-ups:
- The fix should preserve the existing `/repos/[id]` route shape instead of redesigning it to a rest route unless a broader navigation contract change is explicitly requested.
- Any normalization helper introduced for the repo detail page should be reused consistently across server load and client event handlers to avoid mixed encoded and decoded repository IDs during navigation and fetches.

79
scripts/build.mjs Normal file
View File

@@ -0,0 +1,79 @@
import { mkdir, readdir, rename } from 'node:fs/promises';
import { basename, dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import { spawn } from 'node:child_process';
// Repository root: this script lives in scripts/, so walk two dirname() steps
// up from the module's own file URL.
const rootDir = dirname(dirname(fileURLToPath(import.meta.url)));
// SvelteKit route tree scanned for reserved-route ("+"-prefixed) test files.
const routesDir = join(rootDir, 'src', 'routes');
// Records every { sourcePath, targetPath } rename performed before the build
// so the original "+"-prefixed filenames can be restored afterwards.
const renamedRouteTestFiles = [];
/**
 * Recursively gather every reserved-route test file below `directory`.
 *
 * A "reserved-route test file" is a file whose name starts with "+" (SvelteKit
 * reserves that prefix for route modules) and contains ".test." or ".spec.".
 * Directories are always descended into, regardless of their own name.
 *
 * @param {string} directory Absolute directory to scan.
 * @returns {Promise<string[]>} Absolute paths of matching files, in readdir order.
 */
async function collectReservedRouteTestFiles(directory) {
	const found = [];
	for (const entry of await readdir(directory, { withFileTypes: true })) {
		const fullPath = join(directory, entry.name);
		if (entry.isDirectory()) {
			const nested = await collectReservedRouteTestFiles(fullPath);
			found.push(...nested);
		} else if (
			entry.name.startsWith('+') &&
			(entry.name.includes('.test.') || entry.name.includes('.spec.'))
		) {
			found.push(fullPath);
		}
	}
	return found;
}
/**
 * Temporarily strip the leading "+" from reserved-route test filenames so the
 * SvelteKit route scanner does not treat them as route modules during build.
 * Every rename is recorded in `renamedRouteTestFiles` so that
 * restoreReservedRouteTests() can undo it afterwards.
 */
async function renameReservedRouteTests() {
	const testFiles = await collectReservedRouteTestFiles(routesDir);
	for (const originalPath of testFiles) {
		const strippedName = basename(originalPath).slice(1);
		const movedPath = join(dirname(originalPath), strippedName);
		await rename(originalPath, movedPath);
		renamedRouteTestFiles.push({ sourcePath: originalPath, targetPath: movedPath });
	}
}
/**
 * Undo renameReservedRouteTests(): move every renamed test file back to its
 * original "+"-prefixed path, most recent rename first, recreating the parent
 * directory in case the build removed it.
 *
 * Fix: the previous version called `renamedRouteTestFiles.reverse()`, which
 * mutated the shared module-level bookkeeping array in place as a hidden side
 * effect; iterating backwards by index leaves the array untouched.
 */
async function restoreReservedRouteTests() {
	for (let i = renamedRouteTestFiles.length - 1; i >= 0; i -= 1) {
		const { sourcePath, targetPath } = renamedRouteTestFiles[i];
		await mkdir(dirname(sourcePath), { recursive: true });
		await rename(targetPath, sourcePath);
	}
}
/**
 * Run `vite build` as a child process, inheriting stdio so build output
 * streams straight to the caller's terminal. The vite CLI entry point is
 * invoked directly with the current Node executable, avoiding a shell and any
 * dependence on PATH or npm lifecycle context.
 *
 * @returns {Promise<void>} Resolves when the build exits with code 0; rejects
 *   when spawning fails, the build exits non-zero, or the child is killed by
 *   a signal.
 */
function runViteBuild() {
	const viteBinPath = join(rootDir, 'node_modules', 'vite', 'bin', 'vite.js');
	return new Promise((resolve, reject) => {
		const child = spawn(process.execPath, [viteBinPath, 'build'], {
			cwd: rootDir,
			stdio: 'inherit'
		});
		child.once('error', reject);
		child.once('exit', (code, signal) => {
			if (code === 0) {
				resolve();
				return;
			}
			// A signal-terminated child reports code === null; surface the signal
			// instead of the previous unhelpful "code unknown" message.
			const reason = code === null ? `signal ${signal ?? 'unknown'}` : `code ${code}`;
			reject(new Error(`vite build exited with ${reason}`));
		});
	});
}
// Hide reserved-route test files from SvelteKit, run the production build,
// and always restore the original filenames — even when the build fails or
// the rename step itself throws partway through (only files renamed so far
// are recorded, so only those are moved back).
try {
await renameReservedRouteTests();
await runViteBuild();
} finally {
await restoreReservedRouteTests();
}

View File

@@ -9,13 +9,13 @@
import { initializeDatabase } from '$lib/server/db/index.js';
import { getClient } from '$lib/server/db/client.js';
import { initializePipeline } from '$lib/server/pipeline/startup.js';
import {
EMBEDDING_CONFIG_KEY,
createProviderFromConfig,
defaultEmbeddingConfig
} from '$lib/server/embeddings/factory.js';
import { createProviderFromProfile } from '$lib/server/embeddings/registry.js';
import { EmbeddingService } from '$lib/server/embeddings/embedding.service.js';
import type { EmbeddingConfig } from '$lib/server/embeddings/factory.js';
import {
EmbeddingProfileEntity,
type EmbeddingProfileEntityProps
} from '$lib/server/models/embedding-profile.js';
import { EmbeddingProfileMapper } from '$lib/server/mappers/embedding-profile.mapper.js';
import type { Handle } from '@sveltejs/kit';
// ---------------------------------------------------------------------------
@@ -26,37 +26,20 @@ try {
initializeDatabase();
const db = getClient();
// Load persisted embedding configuration (if any).
const configRow = db
.prepare<[string], { value: string }>(`SELECT value FROM settings WHERE key = ?`)
.get(EMBEDDING_CONFIG_KEY);
const activeProfileRow = db
.prepare<[], EmbeddingProfileEntityProps>(
'SELECT * FROM embedding_profiles WHERE is_default = 1 AND enabled = 1 LIMIT 1'
)
.get();
let embeddingService: EmbeddingService | null = null;
if (configRow) {
try {
const config: EmbeddingConfig =
typeof configRow.value === 'string'
? JSON.parse(configRow.value)
: (configRow.value as EmbeddingConfig);
if (config.provider !== 'none') {
const provider = createProviderFromConfig(config);
embeddingService = new EmbeddingService(db, provider);
}
} catch (err) {
console.warn(
`[hooks.server] Could not load embedding config: ${err instanceof Error ? err.message : String(err)}`
if (activeProfileRow) {
const activeProfile = EmbeddingProfileMapper.fromEntity(
new EmbeddingProfileEntity(activeProfileRow)
);
}
} else {
// Use the default (noop) config so the pipeline is still wired up.
const config = defaultEmbeddingConfig();
if (config.provider !== 'none') {
const provider = createProviderFromConfig(config);
embeddingService = new EmbeddingService(db, provider);
}
const provider = createProviderFromProfile(activeProfile);
embeddingService = new EmbeddingService(db, provider, activeProfile.id);
}
initializePipeline(db, embeddingService);

View File

@@ -1,13 +1,25 @@
<script lang="ts">
import { resolve as resolveRoute } from '$app/paths';
import type { Repository } from '$lib/types';
type RepositoryCardRepo = {
id: string;
title: string;
description: string | null;
state: 'pending' | 'indexing' | 'indexed' | 'error';
totalSnippets: number;
trustScore: number;
stars: number | null;
lastIndexedAt: string | Date | null;
embeddingCount?: number;
indexedVersions?: string[];
};
let {
repo,
onReindex,
onDelete
}: {
repo: Repository;
repo: RepositoryCardRepo;
onReindex: (id: string) => void;
onDelete: (id: string) => void;
} = $props();
@@ -26,8 +38,25 @@
error: 'Error'
};
const detailsHref = $derived(
resolveRoute('/repos/[id]', { id: encodeURIComponent(repo.id) })
);
const totalSnippets = $derived(repo.totalSnippets ?? 0);
const trustScore = $derived(repo.trustScore ?? 0);
const embeddingCount = $derived(repo.embeddingCount ?? 0);
const indexedVersions = $derived(repo.indexedVersions ?? []);
const indexedVersionsLabel = $derived.by(() => {
if (indexedVersions.length === 0) {
return 'No indexed version yet';
}
if (indexedVersions.length <= 2) {
return indexedVersions.join(', ');
}
return `${indexedVersions.slice(0, 2).join(', ')} +${indexedVersions.length - 2} more`;
});
</script>
<div class="rounded-xl border border-gray-200 bg-white p-5 shadow-sm">
@@ -63,6 +92,12 @@
{/if}
</div>
<div class="mt-2 flex flex-wrap gap-x-4 gap-y-1 text-sm text-gray-500">
<span>{embeddingCount.toLocaleString()} embeddings</span>
<span>·</span>
<span>Indexed: {indexedVersionsLabel}</span>
</div>
{#if repo.state === 'error'}
<p class="mt-2 text-xs text-red-600">Indexing failed. Check jobs for details.</p>
{/if}
@@ -77,7 +112,7 @@
{repo.state === 'indexing' ? 'Indexing...' : 'Re-index'}
</button>
<a
href={resolveRoute('/repos/[id]', { id: repo.id })}
href={detailsHref}
class="rounded-lg border border-gray-200 px-3 py-1.5 text-sm text-gray-700 hover:bg-gray-50"
>
Details

View File

@@ -0,0 +1,32 @@
import { page } from 'vitest/browser';
import { describe, expect, it, vi } from 'vitest';
import { render } from 'vitest-browser-svelte';
import RepositoryCard from './RepositoryCard.svelte';
// Browser-mode component test (vitest-browser-svelte): renders the card into a
// real DOM served by vitest's browser runner and asserts on rendered output.
describe('RepositoryCard.svelte', () => {
it('encodes slash-bearing repository ids in the details href', async () => {
// Slash-bearing repository IDs ('/facebook/react') are the regression case:
// the details link must percent-encode the whole ID into ONE path segment.
render(RepositoryCard, {
repo: {
id: '/facebook/react',
title: 'React',
description: 'A JavaScript library for building user interfaces',
state: 'indexed',
totalSnippets: 1234,
embeddingCount: 1200,
indexedVersions: ['main', 'v18.3.0'],
trustScore: 9.7,
stars: 230000,
lastIndexedAt: null
// NOTE(review): `as never` silences prop typing — presumably the fixture
// matches the card's repo prop shape; confirm against RepositoryCardRepo.
} as never,
onReindex: vi.fn(),
onDelete: vi.fn()
});
// '/facebook/react' → '%2Ffacebook%2Freact': one encoded segment, not a
// nested path, so the /repos/[id] single-segment route can resolve it.
await expect
.element(page.getByRole('link', { name: 'Details' }))
.toHaveAttribute('href', '/repos/%2Ffacebook%2Freact');
await expect.element(page.getByText('1,200 embeddings')).toBeInTheDocument();
await expect.element(page.getByText('Indexed: main, v18.3.0')).toBeInTheDocument();
});
});

View File

@@ -0,0 +1,41 @@
import type { EmbeddingProviderKind } from '$lib/types';
/**
 * One key/value pair of a profile's config as shown in the settings UI.
 * Sensitive entries (API keys, tokens, …) are marked `redacted` and carry a
 * placeholder instead of the real value.
 */
export interface EmbeddingProfileConfigEntryDto {
key: string;
value: string;
redacted: boolean;
}
/**
 * Client-facing view of an embedding profile. `config` contains only the
 * non-sensitive keys; `configEntries` lists every key with sensitive values
 * replaced by a redaction placeholder. Timestamps are Unix epoch numbers.
 */
export interface EmbeddingProfileDto {
id: string;
providerKind: string;
title: string;
enabled: boolean;
isDefault: boolean;
model: string;
dimensions: number;
config: Record<string, unknown>;
configEntries: EmbeddingProfileConfigEntryDto[];
createdAt: number;
updatedAt: number;
}
/**
 * Full embedding-settings payload: all known profiles plus the currently
 * active one (duplicated by id and by value for client convenience).
 */
export interface EmbeddingSettingsDto {
profiles: EmbeddingProfileDto[];
activeProfileId: string | null;
activeProfile: EmbeddingProfileDto | null;
}
/**
 * Payload for creating or replacing a single profile. Unlike the read DTO,
 * `providerKind` is constrained to the known provider kinds and `config` may
 * contain real (unredacted) secrets supplied by the user.
 */
export interface EmbeddingProfileUpsertDto {
id: string;
providerKind: EmbeddingProviderKind;
title: string;
model: string;
dimensions: number;
config: Record<string, unknown>;
}
/** Settings update request: switch the active profile and/or upsert one profile. */
export interface EmbeddingSettingsUpdateDto {
activeProfileId: string | null;
profile?: EmbeddingProfileUpsertDto;
}

View File

@@ -143,6 +143,9 @@ export function formatContextTxt(
}
noResults.push(`Result count: ${metadata?.resultCount ?? 0}`);
if (metadata?.searchModeUsed) {
noResults.push(`Search mode: ${metadata.searchModeUsed}`);
}
parts.push(noResults.join('\n'));
return parts.join('\n\n');

View File

@@ -0,0 +1,30 @@
-- Rebuild repository_configs to support per-version configuration rows.
-- SQLite cannot drop a PRIMARY KEY in place, so the table is recreated:
-- the single-row-per-repository PK is replaced by two partial unique indexes
-- (one base row with version_id NULL, plus one row per non-NULL version_id).
-- FK checks are disabled during the swap so the DROP/RENAME does not trip
-- references from other tables.
PRAGMA foreign_keys=OFF;
--> statement-breakpoint
-- New shape: same columns as before plus nullable version_id; note there is
-- deliberately no PRIMARY KEY — uniqueness is enforced by the indexes below.
CREATE TABLE `__new_repository_configs` (
`repository_id` text NOT NULL,
`version_id` text,
`project_title` text,
`description` text,
`folders` text,
`exclude_folders` text,
`exclude_files` text,
`rules` text,
`previous_versions` text,
`updated_at` integer NOT NULL,
FOREIGN KEY (`repository_id`) REFERENCES `repositories`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
-- Existing rows become base (version-less) configs: version_id is NULL.
INSERT INTO `__new_repository_configs`
(repository_id, version_id, project_title, description, folders, exclude_folders, exclude_files, rules, previous_versions, updated_at)
SELECT repository_id, NULL, project_title, description, folders, exclude_folders, exclude_files, rules, previous_versions, updated_at
FROM `repository_configs`;
--> statement-breakpoint
DROP TABLE `repository_configs`;
--> statement-breakpoint
ALTER TABLE `__new_repository_configs` RENAME TO `repository_configs`;
--> statement-breakpoint
PRAGMA foreign_keys=ON;
--> statement-breakpoint
-- At most one base config per repository (version_id IS NULL) ...
CREATE UNIQUE INDEX `uniq_repo_config_base` ON `repository_configs` (`repository_id`) WHERE `version_id` IS NULL;
--> statement-breakpoint
-- ... and at most one config per (repository, version) pair.
CREATE UNIQUE INDEX `uniq_repo_config_version` ON `repository_configs` (`repository_id`, `version_id`) WHERE `version_id` IS NOT NULL;

View File

@@ -22,6 +22,13 @@
"when": 1774461897742,
"tag": "0002_silky_stellaris",
"breakpoints": true
},
{
"idx": 3,
"version": "6",
"when": 1743155877000,
"tag": "0003_multiversion_config",
"breakpoints": true
}
]
}

View File

@@ -1,4 +1,13 @@
import { blob, integer, primaryKey, real, sqliteTable, text } from 'drizzle-orm/sqlite-core';
import { sql } from 'drizzle-orm';
import {
blob,
integer,
primaryKey,
real,
sqliteTable,
text,
uniqueIndex
} from 'drizzle-orm/sqlite-core';
// ---------------------------------------------------------------------------
// repositories
@@ -148,10 +157,13 @@ export const indexingJobs = sqliteTable('indexing_jobs', {
// ---------------------------------------------------------------------------
// repository_configs
// ---------------------------------------------------------------------------
export const repositoryConfigs = sqliteTable('repository_configs', {
export const repositoryConfigs = sqliteTable(
'repository_configs',
{
repositoryId: text('repository_id')
.primaryKey()
.notNull()
.references(() => repositories.id, { onDelete: 'cascade' }),
versionId: text('version_id'),
projectTitle: text('project_title'),
description: text('description'),
folders: text('folders', { mode: 'json' }).$type<string[]>(),
@@ -162,7 +174,16 @@ export const repositoryConfigs = sqliteTable('repository_configs', {
{ tag: string; title: string; commitHash?: string }[]
>(),
updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull()
});
},
(table) => [
uniqueIndex('uniq_repo_config_base')
.on(table.repositoryId)
.where(sql`${table.versionId} IS NULL`),
uniqueIndex('uniq_repo_config_version')
.on(table.repositoryId, table.versionId)
.where(sql`${table.versionId} IS NOT NULL`)
]
);
// ---------------------------------------------------------------------------
// settings

View File

@@ -408,6 +408,36 @@ describe('EmbeddingService', () => {
expect(embedding![2]).toBeCloseTo(0.2, 5);
});
it('stores embeddings under the configured profile ID', async () => {
client
.prepare(
`INSERT INTO embedding_profiles
(id, provider_kind, title, enabled, is_default, model, dimensions, config, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, unixepoch(), unixepoch())`
)
.run(
'openai-custom',
'openai-compatible',
'OpenAI Custom',
1,
0,
'test-model',
4,
'{}'
);
const snippetId = seedSnippet(db, client);
const provider = makeProvider(4, 'test-model');
const service = new EmbeddingService(client, provider, 'openai-custom');
await service.embedSnippets([snippetId]);
const row = client
.prepare('SELECT profile_id FROM snippet_embeddings WHERE snippet_id = ?')
.get(snippetId) as { profile_id: string };
expect(row.profile_id).toBe('openai-custom');
});
it('is idempotent — re-embedding replaces the existing row', async () => {
const snippetId = seedSnippet(db, client);
const provider = makeProvider(2);
@@ -469,6 +499,19 @@ describe('EmbeddingService', () => {
};
expect(rows.cnt).toBe(0);
});
it('finds snippets missing embeddings for the active profile', async () => {
const firstSnippetId = seedSnippet(db, client);
const secondSnippetId = seedSnippet(db, client, { content: 'Second snippet content' });
const provider = makeProvider(4);
const service = new EmbeddingService(client, provider, 'local-default');
await service.embedSnippets([firstSnippetId]);
expect(service.findSnippetIdsMissingEmbeddings('/test/embed-repo', null)).toEqual([
secondSnippetId
]);
});
});
// ---------------------------------------------------------------------------

View File

@@ -23,6 +23,42 @@ export class EmbeddingService {
private readonly profileId: string = 'local-default'
) {}
/**
 * Return the IDs of snippets in the given repository/version scope that do
 * not yet have an embedding stored for this service's configured profile.
 *
 * @param repositoryId - Repository whose snippets are inspected.
 * @param versionId - Version scope; null targets base (version-less) snippets.
 * @returns Snippet IDs missing a snippet_embeddings row for the active
 *          profile, ordered by snippet ID.
 */
findSnippetIdsMissingEmbeddings(repositoryId: string, versionId: string | null): string[] {
	// The two scopes previously duplicated the whole query; they differ only
	// in the version predicate and bind parameters, so build those once.
	const versionPredicate = versionId
		? 'snippets.version_id = ?'
		: 'snippets.version_id IS NULL';
	const params = versionId
		? [this.profileId, repositoryId, versionId]
		: [this.profileId, repositoryId];
	const rows = this.db
		.prepare<string[], { id: string }>(
			`SELECT snippets.id
			 FROM snippets
			 LEFT JOIN snippet_embeddings
			   ON snippet_embeddings.snippet_id = snippets.id
			  AND snippet_embeddings.profile_id = ?
			 WHERE snippets.repository_id = ?
			   AND ${versionPredicate}
			   AND snippet_embeddings.snippet_id IS NULL
			 ORDER BY snippets.id`
		)
		.all(...params);
	return rows.map((row) => row.id);
}
/**
* Embed the given snippet IDs and store the results in snippet_embeddings.
*

View File

@@ -1,10 +1,10 @@
/**
* LocalEmbeddingProvider — uses @xenova/transformers (optional dependency).
* LocalEmbeddingProvider — uses @xenova/transformers via dynamic import.
*
* @xenova/transformers is NOT installed by default. This provider uses a
* dynamic import so the module is only required at runtime when the local
* provider is actually configured. If the package is absent, isAvailable()
* returns false and embed() throws a clear error.
* The dynamic import keeps server startup cheap and defers loading the model
* runtime until the local provider is actually used. If the package is absent
* or cannot be resolved, isAvailable() returns false and embed() throws a
* clear error.
*/
import { EmbeddingError, type EmbeddingProvider, type EmbeddingVector } from './provider.js';

View File

@@ -44,11 +44,12 @@ export function createProviderFromProfile(profile: EmbeddingProfile): EmbeddingP
*/
export function getDefaultLocalProfile(): Pick<
EmbeddingProfile,
'id' | 'providerKind' | 'model' | 'dimensions'
'id' | 'providerKind' | 'title' | 'model' | 'dimensions'
> {
return {
id: 'local-default',
providerKind: 'local-transformers',
title: 'Local (Xenova/all-MiniLM-L6-v2)',
model: 'Xenova/all-MiniLM-L6-v2',
dimensions: 384
};

View File

@@ -15,6 +15,7 @@ import { LibrarySearchResult, SnippetSearchResult } from '$lib/server/models/sea
export interface ContextResponseMetadata {
localSource: boolean;
resultCount: number;
searchModeUsed: string;
repository: {
id: string;
title: string;
@@ -130,7 +131,8 @@ export class ContextResponseMapper {
id: metadata.version.id
})
: null,
resultCount: metadata?.resultCount ?? snippets.length
resultCount: metadata?.resultCount ?? snippets.length,
searchModeUsed: metadata?.searchModeUsed ?? 'keyword'
});
}
}

View File

@@ -0,0 +1,38 @@
import {
EmbeddingProfile,
EmbeddingProfileEntity
} from '$lib/server/models/embedding-profile.js';
/**
 * Normalize a persisted profile config value into a plain object.
 *
 * The config column may arrive as an already-parsed object, a JSON string, or
 * NULL. Anything that is not a plain JSON object — invalid JSON, JSON
 * scalars, or arrays — collapses to an empty config instead of propagating a
 * malformed value downstream.
 */
function parseConfig(config: Record<string, unknown> | string | null): Record<string, unknown> {
	if (!config) {
		return {};
	}
	let candidate: unknown = config;
	if (typeof config === 'string') {
		try {
			candidate = JSON.parse(config);
		} catch {
			return {};
		}
	}
	// Reject arrays explicitly: `typeof [] === 'object'` would otherwise let a
	// JSON array masquerade as a config record.
	if (candidate && typeof candidate === 'object' && !Array.isArray(candidate)) {
		return candidate as Record<string, unknown>;
	}
	return {};
}
export class EmbeddingProfileMapper {
	/**
	 * Convert a raw snake_case database entity into the camelCase domain
	 * model, coercing SQLite integer flags to booleans and normalizing the
	 * JSON config column via parseConfig.
	 */
	static fromEntity(entity: EmbeddingProfileEntity): EmbeddingProfile {
		const {
			id,
			provider_kind: providerKind,
			title,
			enabled,
			is_default: isDefault,
			model,
			dimensions,
			config,
			created_at: createdAt,
			updated_at: updatedAt
		} = entity;
		return new EmbeddingProfile({
			id,
			providerKind,
			title,
			enabled: Boolean(enabled),
			isDefault: Boolean(isDefault),
			model,
			dimensions,
			config: parseConfig(config),
			createdAt,
			updatedAt
		});
	}
}

View File

@@ -0,0 +1,71 @@
import type {
EmbeddingProfileConfigEntryDto,
EmbeddingProfileDto,
EmbeddingSettingsDto
} from '$lib/dtos/embedding-settings.js';
import type { EmbeddingProfile } from '$lib/server/models/embedding-profile.js';
import { EmbeddingSettings } from '$lib/server/models/embedding-settings.js';
// Placeholder substituted for any sensitive config value in outgoing DTOs.
const REDACTED_VALUE = '[redacted]';
// Config keys matching this pattern (api keys, tokens, secrets, passwords,
// authorization headers) are never exposed in plaintext to the client.
const SENSITIVE_CONFIG_KEY = /(api[-_]?key|token|secret|password|authorization)/i;
/**
 * Render an arbitrary config value as a display string for the settings UI.
 *
 * null/undefined render as the literal 'null'; strings pass through;
 * numbers/booleans use String(); everything else is JSON-encoded.
 */
function formatConfigValue(value: unknown): string {
	if (value === null || value === undefined) return 'null';
	if (typeof value === 'string') return value;
	if (typeof value === 'number' || typeof value === 'boolean') return String(value);
	// JSON.stringify returns undefined for values it cannot encode (symbols,
	// functions); fall back to String() so the declared string return holds.
	return JSON.stringify(value) ?? String(value);
}
/**
 * Split a profile config into UI-safe pieces: a `visibleConfig` object that
 * omits sensitive keys entirely, and an ordered `configEntries` list in which
 * sensitive values are replaced by the redaction placeholder.
 */
function sanitizeConfig(config: Record<string, unknown>): {
	visibleConfig: Record<string, unknown>;
	configEntries: EmbeddingProfileConfigEntryDto[];
} {
	const sortedPairs = Object.entries(config).sort(([leftKey], [rightKey]) =>
		leftKey.localeCompare(rightKey)
	);
	const visibleConfig: Record<string, unknown> = {};
	const configEntries: EmbeddingProfileConfigEntryDto[] = [];
	for (const [key, value] of sortedPairs) {
		const redacted = SENSITIVE_CONFIG_KEY.test(key);
		if (!redacted) {
			visibleConfig[key] = value;
		}
		configEntries.push({
			key,
			value: redacted ? REDACTED_VALUE : formatConfigValue(value),
			redacted
		});
	}
	return { visibleConfig, configEntries };
}
/**
 * Map a domain EmbeddingProfile to its wire DTO, redacting sensitive config
 * entries (API keys, tokens, …) via sanitizeConfig before exposure.
 */
function toProfileDto(profile: EmbeddingProfile): EmbeddingProfileDto {
const { visibleConfig, configEntries } = sanitizeConfig(profile.config);
return {
id: profile.id,
providerKind: profile.providerKind,
title: profile.title,
enabled: profile.enabled,
isDefault: profile.isDefault,
model: profile.model,
dimensions: profile.dimensions,
config: visibleConfig,
configEntries,
createdAt: profile.createdAt,
updatedAt: profile.updatedAt
};
}
export class EmbeddingSettingsDtoMapper {
	/**
	 * Convert the EmbeddingSettings aggregate into its client-facing DTO,
	 * sanitizing every profile's config along the way.
	 */
	static toDto(settings: EmbeddingSettings): EmbeddingSettingsDto {
		const activeProfile = settings.activeProfile;
		return {
			profiles: settings.profiles.map((profile) => toProfileDto(profile)),
			activeProfileId: settings.activeProfileId,
			activeProfile: activeProfile ? toProfileDto(activeProfile) : null
		};
	}
}

View File

@@ -173,6 +173,7 @@ export class ContextJsonResponseDto {
repository: ContextRepositoryJsonDto | null;
version: ContextVersionJsonDto | null;
resultCount: number;
searchModeUsed: string;
constructor(props: ContextJsonResponseDto) {
this.snippets = props.snippets;
@@ -182,5 +183,6 @@ export class ContextJsonResponseDto {
this.repository = props.repository;
this.version = props.version;
this.resultCount = props.resultCount;
this.searchModeUsed = props.searchModeUsed;
}
}

View File

@@ -0,0 +1,77 @@
/**
 * Row-shaped props mirroring the `embedding_profiles` table: snake_case
 * column names and storage-level value types.
 */
export interface EmbeddingProfileEntityProps {
id: string;
provider_kind: string;
title: string;
// SQLite stores booleans as 0/1 integers; either form may appear here.
enabled: boolean | number;
is_default: boolean | number;
model: string;
dimensions: number;
// May still be a raw JSON string straight from the database.
config: Record<string, unknown> | string | null;
created_at: number;
updated_at: number;
}
/**
 * Database-layer entity for one `embedding_profiles` row. Preserves the raw
 * snake_case column names and storage value types (integer booleans, config
 * possibly still a JSON string); conversion to the domain model happens in
 * the mapper layer.
 */
export class EmbeddingProfileEntity {
	id: string;
	provider_kind: string;
	title: string;
	enabled: boolean | number;
	is_default: boolean | number;
	model: string;
	dimensions: number;
	config: Record<string, unknown> | string | null;
	created_at: number;
	updated_at: number;

	constructor(props: EmbeddingProfileEntityProps) {
		const {
			id,
			provider_kind,
			title,
			enabled,
			is_default,
			model,
			dimensions,
			config,
			created_at,
			updated_at
		} = props;
		this.id = id;
		this.provider_kind = provider_kind;
		this.title = title;
		this.enabled = enabled;
		this.is_default = is_default;
		this.model = model;
		this.dimensions = dimensions;
		this.config = config;
		this.created_at = created_at;
		this.updated_at = updated_at;
	}
}
/**
 * Domain-level props for an embedding profile: camelCase names, real
 * booleans, and a config already parsed into a plain object.
 */
export interface EmbeddingProfileProps {
id: string;
providerKind: string;
title: string;
enabled: boolean;
isDefault: boolean;
model: string;
dimensions: number;
config: Record<string, unknown>;
createdAt: number;
updatedAt: number;
}
/**
 * Domain model for one embedding provider profile: which provider/model to
 * use, its output dimensionality, its (already parsed) configuration, and
 * whether it is enabled / the default profile.
 */
export class EmbeddingProfile {
	id: string;
	providerKind: string;
	title: string;
	enabled: boolean;
	isDefault: boolean;
	model: string;
	dimensions: number;
	config: Record<string, unknown>;
	createdAt: number;
	updatedAt: number;

	constructor(props: EmbeddingProfileProps) {
		const {
			id,
			providerKind,
			title,
			enabled,
			isDefault,
			model,
			dimensions,
			config,
			createdAt,
			updatedAt
		} = props;
		this.id = id;
		this.providerKind = providerKind;
		this.title = title;
		this.enabled = enabled;
		this.isDefault = isDefault;
		this.model = model;
		this.dimensions = dimensions;
		this.config = config;
		this.createdAt = createdAt;
		this.updatedAt = updatedAt;
	}
}

View File

@@ -0,0 +1,20 @@
import type { EmbeddingProfile } from './embedding-profile.js';
/** Constructor props for the EmbeddingSettings aggregate. */
export interface EmbeddingSettingsProps {
// All known profiles, enabled or not.
profiles: EmbeddingProfile[];
// Profile currently used for new embeddings, or null when none is selected.
activeProfile: EmbeddingProfile | null;
}
/**
 * Aggregate over every embedding profile plus the one currently in use.
 * The active profile's id is derived on demand rather than stored.
 */
export class EmbeddingSettings {
	profiles: EmbeddingProfile[];
	activeProfile: EmbeddingProfile | null;

	constructor({ profiles, activeProfile }: EmbeddingSettingsProps) {
		this.profiles = profiles;
		this.activeProfile = activeProfile;
	}

	/** ID of the active profile, or null when no profile is selected. */
	get activeProfileId(): string | null {
		const profile = this.activeProfile;
		return profile ? profile.id : null;
	}
}

View File

@@ -12,6 +12,7 @@ import { join } from 'node:path';
import { JobQueue } from './job-queue.js';
import { IndexingPipeline } from './indexing.pipeline.js';
import { recoverStaleJobs } from './startup.js';
import { EmbeddingService } from '$lib/server/embeddings/embedding.service.js';
// ---------------------------------------------------------------------------
// Test DB factory
@@ -22,7 +23,13 @@ function createTestDb(): Database.Database {
client.pragma('foreign_keys = ON');
const migrationsFolder = join(import.meta.dirname, '../db/migrations');
const migrationSql = readFileSync(join(migrationsFolder, '0000_large_master_chief.sql'), 'utf-8');
for (const migrationFile of [
'0000_large_master_chief.sql',
'0001_quick_nighthawk.sql',
'0002_silky_stellaris.sql',
'0003_multiversion_config.sql'
]) {
const migrationSql = readFileSync(join(migrationsFolder, migrationFile), 'utf-8');
const statements = migrationSql
.split('--> statement-breakpoint')
@@ -32,6 +39,7 @@ function createTestDb(): Database.Database {
for (const stmt of statements) {
client.exec(stmt);
}
}
return client;
}
@@ -68,6 +76,28 @@ function insertRepo(db: Database.Database, overrides: Partial<Record<string, unk
);
}
function insertVersion(
db: Database.Database,
overrides: Partial<Record<string, unknown>> = {}
): string {
const id = crypto.randomUUID();
db.prepare(
`INSERT INTO repository_versions
(id, repository_id, tag, title, state, total_snippets, indexed_at, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`
).run(
overrides.id ?? id,
overrides.repository_id ?? '/test/repo',
overrides.tag ?? 'v1.0.0',
overrides.title ?? null,
overrides.state ?? 'pending',
overrides.total_snippets ?? 0,
overrides.indexed_at ?? null,
overrides.created_at ?? now
);
return (overrides.id as string) ?? id;
}
function insertJob(
db: Database.Database,
overrides: Partial<Record<string, unknown>> = {}
@@ -238,7 +268,8 @@ describe('IndexingPipeline', () => {
crawlResult: {
files: Array<{ path: string; content: string; sha: string; language: string }>;
totalFiles: number;
} = { files: [], totalFiles: 0 }
} = { files: [], totalFiles: 0 },
embeddingService: EmbeddingService | null = null
) {
const mockGithubCrawl = vi.fn().mockResolvedValue({
...crawlResult,
@@ -256,11 +287,20 @@ describe('IndexingPipeline', () => {
})
};
return new IndexingPipeline(db, mockGithubCrawl as never, mockLocalCrawler as never, null);
return new IndexingPipeline(
db,
mockGithubCrawl as never,
mockLocalCrawler as never,
embeddingService
);
}
function makeJob(repositoryId = '/test/repo') {
const jobId = insertJob(db, { repository_id: repositoryId, status: 'queued' });
function makeJob(repositoryId = '/test/repo', versionId?: string) {
const jobId = insertJob(db, {
repository_id: repositoryId,
version_id: versionId ?? null,
status: 'queued'
});
return db.prepare(`SELECT * FROM indexing_jobs WHERE id = ?`).get(jobId) as {
id: string;
repositoryId?: string;
@@ -388,6 +428,64 @@ describe('IndexingPipeline', () => {
expect(secondSnippetIds).toEqual(firstSnippetIds);
});
it('re-index backfills missing embeddings for unchanged snippets', async () => {
const provider = {
name: 'test-provider',
model: 'test-model',
dimensions: 3,
embed: vi.fn(async (texts: string[]) =>
texts.map(() => ({
values: new Float32Array([0.1, 0.2, 0.3]),
dimensions: 3,
model: 'test-model'
}))
),
isAvailable: vi.fn(async () => true)
};
const embeddingService = new EmbeddingService(db, provider, 'local-default');
const files = [
{
path: 'README.md',
content: '# Hello\n\nThis is documentation.',
sha: 'sha-readme',
language: 'markdown'
}
];
const pipeline = makePipeline({ files, totalFiles: 1 }, embeddingService);
const job1 = makeJob();
await pipeline.run(job1 as never);
const firstSnippetIds = (db.prepare(`SELECT id FROM snippets ORDER BY id`).all() as { id: string }[])
.map((row) => row.id);
expect(firstSnippetIds.length).toBeGreaterThan(0);
const firstEmbeddingCount = (
db.prepare(`SELECT COUNT(*) as n FROM snippet_embeddings WHERE profile_id = 'local-default'`).get() as {
n: number;
}
).n;
expect(firstEmbeddingCount).toBe(firstSnippetIds.length);
db.prepare(`DELETE FROM snippet_embeddings WHERE profile_id = 'local-default'`).run();
const job2Id = insertJob(db, { repository_id: '/test/repo', status: 'queued' });
const job2 = db.prepare(`SELECT * FROM indexing_jobs WHERE id = ?`).get(job2Id) as never;
await pipeline.run(job2);
const secondSnippetIds = (db.prepare(`SELECT id FROM snippets ORDER BY id`).all() as {
id: string;
}[]).map((row) => row.id);
const secondEmbeddingCount = (
db.prepare(`SELECT COUNT(*) as n FROM snippet_embeddings WHERE profile_id = 'local-default'`).get() as {
n: number;
}
).n;
expect(secondSnippetIds).toEqual(firstSnippetIds);
expect(secondEmbeddingCount).toBe(firstSnippetIds.length);
});
it('replaces snippets atomically when a file changes', async () => {
const pipeline1 = makePipeline({
files: [
@@ -573,4 +671,218 @@ describe('IndexingPipeline', () => {
expect(finalJob.status).toBe('done');
expect(finalJob.progress).toBe(100);
});
// Happy path: a version-scoped job must transition the version row
// pending -> indexing -> indexed and record snippet stats + timestamp.
it('updates repository_versions state to indexing then indexed when job has versionId', async () => {
const versionId = insertVersion(db, { tag: 'v1.0.0', state: 'pending' });
const files = [
{
path: 'README.md',
content: '# Hello\n\nThis is documentation.',
sha: 'sha-readme',
language: 'markdown'
}
];
const pipeline = makePipeline({ files, totalFiles: 1 });
const job = makeJob('/test/repo', versionId);
await pipeline.run(job as never);
// Inspect the version row directly — the pipeline owns these columns.
const version = db
.prepare(`SELECT state, total_snippets, indexed_at FROM repository_versions WHERE id = ?`)
.get(versionId) as { state: string; total_snippets: number; indexed_at: number | null };
expect(version.state).toBe('indexed');
expect(version.total_snippets).toBeGreaterThan(0);
expect(version.indexed_at).not.toBeNull();
});
// Failure path: a crawl error must surface to the caller AND flip the
// version row to 'error' so the UI can show a failed version index.
it('updates repository_versions state to error when pipeline throws and job has versionId', async () => {
const versionId = insertVersion(db, { tag: 'v1.0.0', state: 'pending' });
const errorCrawl = vi.fn().mockRejectedValue(new Error('crawl failed'));
// Both crawler slots reject so the pipeline fails regardless of source kind.
const pipeline = new IndexingPipeline(
db,
errorCrawl as never,
{ crawl: errorCrawl } as never,
null
);
const job = makeJob('/test/repo', versionId);
await expect(pipeline.run(job as never)).rejects.toThrow('crawl failed');
const version = db
.prepare(`SELECT state FROM repository_versions WHERE id = ?`)
.get(versionId) as { state: string };
expect(version.state).toBe('error');
});
// Isolation: a default-branch job (no versionId) must leave unrelated
// version rows completely untouched.
it('does not touch repository_versions when job has no versionId', async () => {
const versionId = insertVersion(db, { tag: 'v1.0.0', state: 'pending' });
const pipeline = makePipeline({ files: [], totalFiles: 0 });
const job = makeJob('/test/repo'); // no versionId
await pipeline.run(job as never);
const version = db
.prepare(`SELECT state FROM repository_versions WHERE id = ?`)
.get(versionId) as { state: string };
// State should remain 'pending' — pipeline with no versionId must not touch it
expect(version.state).toBe('pending');
});
// Version-scoped jobs must crawl the version's git tag, not the branch:
// the version's tag is looked up and forwarded to the crawler as `ref`.
it('calls LocalCrawler with ref=v1.2.0 when job has a versionId with tag v1.2.0', async () => {
const versionId = insertVersion(db, { tag: 'v1.2.0', state: 'pending' });
const crawl = vi.fn().mockResolvedValue({
files: [],
totalFiles: 0,
skippedFiles: 0,
branch: 'main',
commitSha: 'abc'
});
const pipeline = new IndexingPipeline(db, vi.fn() as never, { crawl } as never, null);
const job = makeJob('/test/repo', versionId);
await pipeline.run(job as never);
// The crawler receives the tag as the checkout ref.
expect(crawl).toHaveBeenCalledWith({
rootPath: '/tmp/test-repo',
ref: 'v1.2.0'
});
});
// Without a versionId the pipeline must not pin a ref, letting the
// crawler use the repository's default branch.
it('calls LocalCrawler with ref=undefined when job has no versionId (main-branch)', async () => {
const crawl = vi.fn().mockResolvedValue({
files: [],
totalFiles: 0,
skippedFiles: 0,
branch: 'main',
commitSha: 'abc'
});
const pipeline = new IndexingPipeline(db, vi.fn() as never, { crawl } as never, null);
const job = makeJob('/test/repo'); // no versionId
await pipeline.run(job as never);
expect(crawl).toHaveBeenCalledWith({
rootPath: '/tmp/test-repo',
ref: undefined
});
});
// trueref.json excludeFiles support: both a plain-filename pattern and a
// trailing-`*` glob prefix must prevent files from being indexed, while
// non-matching files are indexed normally.
it('excludes files matching excludeFiles patterns from trueref.json', async () => {
const truerefConfig = JSON.stringify({
excludeFiles: ['migration-guide.md', 'docs/legacy*']
});
const files = [
{
path: 'trueref.json',
content: truerefConfig,
sha: 'sha-config',
language: 'json'
},
{
path: 'README.md',
content: '# Hello\n\nThis is documentation.',
sha: 'sha-readme',
language: 'markdown'
},
{
// Should be dropped by the plain-filename pattern.
path: 'migration-guide.md',
content: '# Migration Guide\n\nThis should be excluded.',
sha: 'sha-migration',
language: 'markdown'
},
{
// Should be dropped by the 'docs/legacy*' glob prefix.
path: 'docs/legacy-api.md',
content: '# Legacy API\n\nShould be excluded by glob prefix.',
sha: 'sha-legacy',
language: 'markdown'
}
];
const pipeline = makePipeline({ files, totalFiles: files.length });
const job = makeJob();
await pipeline.run(job as never);
const docs = db
.prepare(`SELECT file_path FROM documents ORDER BY file_path`)
.all() as { file_path: string }[];
const filePaths = docs.map((d) => d.file_path);
// migration-guide.md and docs/legacy-api.md must be absent.
expect(filePaths).not.toContain('migration-guide.md');
expect(filePaths).not.toContain('docs/legacy-api.md');
// README.md must still be indexed.
expect(filePaths).toContain('README.md');
});
// Rules from trueref.json are persisted to repository_configs with
// version_id NULL when the job is not version-scoped.
it('persists repo-wide rules from trueref.json to repository_configs after indexing', async () => {
const truerefConfig = JSON.stringify({
rules: ['Always use TypeScript strict mode', 'Prefer async/await over callbacks']
});
const files = [
{
path: 'trueref.json',
content: truerefConfig,
sha: 'sha-config',
language: 'json'
}
];
const pipeline = makePipeline({ files, totalFiles: files.length });
const job = makeJob();
await pipeline.run(job as never);
// Rules are stored as a JSON array string in the `rules` column.
const row = db
.prepare(
`SELECT rules FROM repository_configs WHERE repository_id = '/test/repo' AND version_id IS NULL`
)
.get() as { rules: string } | undefined;
expect(row).toBeDefined();
const rules = JSON.parse(row!.rules);
expect(rules).toEqual(['Always use TypeScript strict mode', 'Prefer async/await over callbacks']);
});
// A version-scoped job writes BOTH the repo-wide row (version_id NULL)
// and a version-specific row keyed by (repository_id, version_id).
it('persists version-specific rules under (repositoryId, versionId) when job has versionId', async () => {
const versionId = insertVersion(db, { tag: 'v2.0.0', state: 'pending' });
const truerefConfig = JSON.stringify({
rules: ['This is v2. Use the new Builder API.']
});
const files = [
{
path: 'trueref.json',
content: truerefConfig,
sha: 'sha-config',
language: 'json'
}
];
const pipeline = makePipeline({ files, totalFiles: files.length });
const job = makeJob('/test/repo', versionId);
await pipeline.run(job as never);
// Repo-wide row (version_id IS NULL) must exist.
const repoRow = db
.prepare(
`SELECT rules FROM repository_configs WHERE repository_id = '/test/repo' AND version_id IS NULL`
)
.get() as { rules: string } | undefined;
expect(repoRow).toBeDefined();
// Version-specific row must also exist.
const versionRow = db
.prepare(
`SELECT rules FROM repository_configs WHERE repository_id = '/test/repo' AND version_id = ?`
)
.get(versionId) as { rules: string } | undefined;
expect(versionRow).toBeDefined();
const rules = JSON.parse(versionRow!.rules);
expect(rules).toEqual(['This is v2. Use the new Builder API.']);
});
});

View File

@@ -22,6 +22,7 @@ import type { EmbeddingService } from '$lib/server/embeddings/embedding.service.
import { RepositoryMapper } from '$lib/server/mappers/repository.mapper.js';
import { IndexingJob } from '$lib/server/models/indexing-job.js';
import { Repository, RepositoryEntity } from '$lib/server/models/repository.js';
import { resolveConfig } from '$lib/server/config/config-parser.js';
import { parseFile } from '$lib/server/parser/index.js';
import { computeTrustScore } from '$lib/server/search/trust-score.js';
import { computeDiff } from './diff.js';
@@ -90,18 +91,41 @@ export class IndexingPipeline {
// Mark repo as actively indexing.
this.updateRepo(repo.id, { state: 'indexing' });
if (normJob.versionId) {
this.updateVersion(normJob.versionId, { state: 'indexing' });
}
// ---- Stage 1: Crawl -------------------------------------------------
const crawlResult = await this.crawl(repo);
const totalFiles = crawlResult.totalFiles;
const versionTag = normJob.versionId
? this.getVersionTag(normJob.versionId)
: undefined;
const crawlResult = await this.crawl(repo, versionTag);
// Parse trueref.json / context7.json if present in the crawl results.
const configFile = crawlResult.files.find(
(f) => f.path === 'trueref.json' || f.path === 'context7.json'
);
const parsedConfig = configFile
? resolveConfig([{ filename: configFile.path, content: configFile.content }])
: null;
const excludeFiles: string[] = parsedConfig?.config.excludeFiles ?? [];
// Filter out excluded files before diff computation.
const filteredFiles =
excludeFiles.length > 0
? crawlResult.files.filter(
(f) => !excludeFiles.some((pattern) => IndexingPipeline.matchesExcludePattern(f.path, pattern))
)
: crawlResult.files;
const totalFiles = filteredFiles.length;
this.updateJob(job.id, { totalFiles });
// ---- Stage 2: Parse & diff ------------------------------------------
// Load all existing documents for this repo so computeDiff can
// classify every crawled file and detect deletions.
const existingDocs = this.getExistingDocuments(repo.id, normJob.versionId);
const diff = computeDiff(crawlResult.files, existingDocs);
const diff = computeDiff(filteredFiles, existingDocs);
// Accumulate new documents/snippets; skip unchanged files.
const newDocuments: NewDocument[] = [];
@@ -187,8 +211,15 @@ export class IndexingPipeline {
this.replaceSnippets(repo.id, changedDocIds, newDocuments, newSnippets);
// ---- Stage 4: Embeddings (if provider is configured) ----------------
if (this.embeddingService && newSnippets.length > 0) {
const snippetIds = newSnippets.map((s) => s.id!);
if (this.embeddingService) {
const snippetIds = this.embeddingService.findSnippetIdsMissingEmbeddings(
repo.id,
normJob.versionId
);
if (snippetIds.length === 0) {
// No missing embeddings for the active profile; parsing progress is final.
} else {
const embeddingsTotal = snippetIds.length;
await this.embeddingService.embedSnippets(snippetIds, (done) => {
@@ -202,6 +233,7 @@ export class IndexingPipeline {
this.updateJob(job.id, { progress });
});
}
}
// ---- Stage 5: Update repository stats --------------------------------
const stats = this.computeStats(repo.id);
@@ -221,6 +253,25 @@ export class IndexingPipeline {
lastIndexedAt: Math.floor(Date.now() / 1000)
});
if (normJob.versionId) {
const versionStats = this.computeVersionStats(normJob.versionId);
this.updateVersion(normJob.versionId, {
state: 'indexed',
totalSnippets: versionStats.totalSnippets,
indexedAt: Math.floor(Date.now() / 1000)
});
}
// ---- Stage 6: Persist rules from config ----------------------------
if (parsedConfig?.config.rules?.length) {
// Repo-wide rules (versionId = null).
this.upsertRepoConfig(repo.id, null, parsedConfig.config.rules);
// Version-specific rules stored separately when indexing a version.
if (normJob.versionId) {
this.upsertRepoConfig(repo.id, normJob.versionId, parsedConfig.config.rules);
}
}
this.updateJob(job.id, {
status: 'done',
progress: 100,
@@ -238,6 +289,9 @@ export class IndexingPipeline {
// Restore repo to error state but preserve any existing indexed data.
this.updateRepo(repositoryId, { state: 'error' });
if (normJob.versionId) {
this.updateVersion(normJob.versionId, { state: 'error' });
}
throw error;
}
@@ -247,7 +301,7 @@ export class IndexingPipeline {
// Private — crawl
// -------------------------------------------------------------------------
private async crawl(repo: Repository): Promise<{
private async crawl(repo: Repository, ref?: string): Promise<{
files: Array<{ path: string; content: string; sha: string; size: number; language: string }>;
totalFiles: number;
}> {
@@ -264,7 +318,7 @@ export class IndexingPipeline {
const result = await this.githubCrawl({
owner,
repo: repoName,
ref: repo.branch ?? undefined,
ref: ref ?? repo.branch ?? undefined,
token: repo.githubToken ?? undefined
});
@@ -273,13 +327,20 @@ export class IndexingPipeline {
// Local filesystem crawl.
const result = await this.localCrawler.crawl({
rootPath: repo.sourceUrl,
ref: repo.branch !== 'main' ? (repo.branch ?? undefined) : undefined
ref: ref ?? (repo.branch !== 'main' ? (repo.branch ?? undefined) : undefined)
});
return { files: result.files, totalFiles: result.totalFiles };
}
}
/**
 * Looks up the git tag for a repository version.
 *
 * @param versionId - Primary key into `repository_versions`.
 * @returns The version's tag, or undefined when no such row exists.
 */
private getVersionTag(versionId: string): string | undefined {
  const stmt = this.db.prepare<[string], { tag: string }>(
    `SELECT tag FROM repository_versions WHERE id = ?`
  );
  const match = stmt.get(versionId);
  return match === undefined ? undefined : match.tag;
}
// -------------------------------------------------------------------------
// Private — atomic snippet replacement
// -------------------------------------------------------------------------
@@ -376,6 +437,16 @@ export class IndexingPipeline {
};
}
/**
 * Counts how many snippets are stored for the given version.
 *
 * @param versionId - Primary key into `repository_versions`.
 * @returns Object with the snippet count (0 when none exist).
 */
private computeVersionStats(versionId: string): { totalSnippets: number } {
  const countRow = this.db
    .prepare<[string], { total_snippets: number }>(
      `SELECT COUNT(*) as total_snippets FROM snippets WHERE version_id = ?`
    )
    .get(versionId);
  const totalSnippets = countRow ? countRow.total_snippets : 0;
  return { totalSnippets };
}
// -------------------------------------------------------------------------
// Private — DB helpers
// -------------------------------------------------------------------------
@@ -425,6 +496,73 @@ export class IndexingPipeline {
const values = [...Object.values(allFields), id];
this.db.prepare(`UPDATE repositories SET ${sets} WHERE id = ?`).run(...values);
}
/**
 * Applies a partial update to a `repository_versions` row. Field names are
 * given in camelCase and converted to snake_case column names via toSnake.
 *
 * @param id - Version row primary key.
 * @param fields - camelCase column -> value map to write.
 */
private updateVersion(id: string, fields: Record<string, unknown>): void {
  const entries = Object.entries(fields);
  const assignments = entries.map(([key]) => `${toSnake(key)} = ?`).join(', ');
  const params = entries.map(([, value]) => value);
  this.db
    .prepare(`UPDATE repository_versions SET ${assignments} WHERE id = ?`)
    .run(...params, id);
}
private upsertRepoConfig(
repositoryId: string,
versionId: string | null,
rules: string[]
): void {
const now = Math.floor(Date.now() / 1000);
// Use DELETE + INSERT because ON CONFLICT … DO UPDATE doesn't work reliably
// with partial unique indexes in all SQLite versions.
if (versionId === null) {
this.db
.prepare(
`DELETE FROM repository_configs WHERE repository_id = ? AND version_id IS NULL`
)
.run(repositoryId);
} else {
this.db
.prepare(
`DELETE FROM repository_configs WHERE repository_id = ? AND version_id = ?`
)
.run(repositoryId, versionId);
}
this.db
.prepare(
`INSERT INTO repository_configs (repository_id, version_id, rules, updated_at)
VALUES (?, ?, ?, ?)`
)
.run(repositoryId, versionId, JSON.stringify(rules), now);
}
// -------------------------------------------------------------------------
// Private — static helpers
// -------------------------------------------------------------------------
/**
 * Returns true when `filePath` matches the given exclude `pattern`.
 *
 * Supported patterns:
 * - Plain filename: `migration-guide.md` matches any path ending in `/migration-guide.md`
 *   or equal to `migration-guide.md`.
 * - Wildcard: a single `*` splits the pattern into a required prefix and a
 *   required suffix — `docs/migration*` matches paths starting with
 *   `docs/migration`, `*.generated.md` matches paths ending in `.generated.md`,
 *   and `docs/*.md` requires both.
 * - Exact path: `src/legacy/old-api.ts` matches exactly that path.
 */
private static matchesExcludePattern(filePath: string, pattern: string): boolean {
  if (pattern.includes('*')) {
    const starIndex = pattern.indexOf('*');
    const prefix = pattern.slice(0, starIndex);
    // Fix: also honor the text after '*'. The previous version ignored it,
    // so a pattern like '*.md' (empty prefix) excluded EVERY file.
    const suffix = pattern.slice(starIndex + 1);
    return (
      filePath.startsWith(prefix) &&
      filePath.endsWith(suffix) &&
      // Prevent the prefix and suffix from overlapping in short paths
      // (e.g. pattern 'ab*ba' must not match path 'aba').
      filePath.length >= prefix.length + suffix.length
    );
  }
  // No wildcard — treat as plain name or exact path.
  if (!pattern.includes('/')) {
    // Plain filename: match basename (path ends with /<pattern> or equals pattern).
    return filePath === pattern || filePath.endsWith('/' + pattern);
  }
  // Contains a slash — exact path match.
  return filePath === pattern;
}
}
// ---------------------------------------------------------------------------

View File

@@ -36,14 +36,17 @@ export class JobQueue {
* existing job instead of creating a duplicate.
*/
enqueue(repositoryId: string, versionId?: string): IndexingJob {
// Return early if there's already an active job for this repo.
// Return early if there's already an active job for this exact (repo, version) pair.
const resolvedVersionId = versionId ?? null;
const activeRaw = this.db
.prepare<[string], IndexingJobEntity>(
.prepare<[string, string | null, string | null], IndexingJobEntity>(
`${JOB_SELECT}
WHERE repository_id = ? AND status IN ('queued', 'running')
WHERE repository_id = ?
AND (version_id = ? OR (version_id IS NULL AND ? IS NULL))
AND status IN ('queued', 'running')
ORDER BY created_at DESC LIMIT 1`
)
.get(repositoryId);
.get(repositoryId, resolvedVersionId, resolvedVersionId);
if (activeRaw) {
// Ensure the queue is draining even if enqueue was called concurrently.

View File

@@ -395,7 +395,7 @@ describe('HybridSearchService', () => {
seedSnippet(client, { repositoryId: repoId, documentId: docId, content: 'hello world' });
const svc = new HybridSearchService(client, searchService, null);
const results = await svc.search('hello', { repositoryId: repoId });
const { results } = await svc.search('hello', { repositoryId: repoId });
expect(results.length).toBeGreaterThan(0);
expect(results[0].snippet.content).toBe('hello world');
@@ -406,14 +406,14 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const results = await svc.search('alpha zero', { repositoryId: repoId, alpha: 0 });
const { results } = await svc.search('alpha zero', { repositoryId: repoId, alpha: 0 });
expect(results.length).toBeGreaterThan(0);
});
it('returns empty array when FTS5 query is blank and no provider', async () => {
const svc = new HybridSearchService(client, searchService, null);
const results = await svc.search(' ', { repositoryId: repoId });
const { results } = await svc.search(' ', { repositoryId: repoId });
expect(results).toHaveLength(0);
});
@@ -425,7 +425,7 @@ describe('HybridSearchService', () => {
});
const svc = new HybridSearchService(client, searchService, makeNoopProvider());
const results = await svc.search('noop fallback', { repositoryId: repoId });
const { results } = await svc.search('noop fallback', { repositoryId: repoId });
expect(results.length).toBeGreaterThan(0);
});
@@ -445,7 +445,7 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0, 0, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const results = await svc.search('hybrid search', {
const { results } = await svc.search('hybrid search', {
repositoryId: repoId,
alpha: 0.5
});
@@ -464,7 +464,7 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const results = await svc.search('deduplicate snippet', {
const { results } = await svc.search('deduplicate snippet', {
repositoryId: repoId,
alpha: 0.5
});
@@ -487,7 +487,7 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const results = await svc.search('pagination test', {
const { results } = await svc.search('pagination test', {
repositoryId: repoId,
limit: 3,
alpha: 0.5
@@ -519,7 +519,7 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const results = await svc.search('anything', {
const { results } = await svc.search('anything', {
repositoryId: repoId,
alpha: 1
});
@@ -543,7 +543,7 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const results = await svc.search('metadata check', {
const { results } = await svc.search('metadata check', {
repositoryId: repoId,
alpha: 0.5
});
@@ -580,7 +580,7 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const results = await svc.search('repository keyword', {
const { results } = await svc.search('repository keyword', {
repositoryId: repoId,
alpha: 0.5
});
@@ -607,7 +607,7 @@ describe('HybridSearchService', () => {
const provider = makeMockProvider([[1, 0]]);
const svc = new HybridSearchService(client, searchService, provider);
const codeResults = await svc.search('function example', {
const { results: codeResults } = await svc.search('function example', {
repositoryId: repoId,
type: 'code',
alpha: 0.5
@@ -632,7 +632,7 @@ describe('HybridSearchService', () => {
const svc = new HybridSearchService(client, searchService, provider);
// Should not throw and should return results.
const results = await svc.search('default alpha hybrid', { repositoryId: repoId });
const { results } = await svc.search('default alpha hybrid', { repositoryId: repoId });
expect(Array.isArray(results)).toBe(true);
});
@@ -761,7 +761,7 @@ describe('HybridSearchService', () => {
const searchService = new SearchService(client);
const hybridService = new HybridSearchService(client, searchService, mockProvider);
const results = await hybridService.search('keyword', {
const { results } = await hybridService.search('keyword', {
repositoryId: repoId,
searchMode: 'keyword'
});
@@ -820,7 +820,7 @@ describe('HybridSearchService', () => {
const searchService = new SearchService(client);
const hybridService = new HybridSearchService(client, searchService, mockProvider);
const results = await hybridService.search('semantic', {
const { results } = await hybridService.search('semantic', {
repositoryId: repoId,
searchMode: 'semantic',
profileId: 'test-profile'
@@ -848,7 +848,7 @@ describe('HybridSearchService', () => {
const searchService = new SearchService(client);
const hybridService = new HybridSearchService(client, searchService, null);
const results = await hybridService.search('test query', {
const { results } = await hybridService.search('test query', {
repositoryId: repoId,
searchMode: 'semantic'
});
@@ -867,7 +867,7 @@ describe('HybridSearchService', () => {
const searchService = new SearchService(client);
const hybridService = new HybridSearchService(client, searchService, mockProvider);
const results = await hybridService.search(' ', {
const { results } = await hybridService.search(' ', {
repositoryId: repoId,
searchMode: 'semantic'
});
@@ -885,7 +885,7 @@ describe('HybridSearchService', () => {
const searchService = new SearchService(client);
const hybridService = new HybridSearchService(client, searchService, noopProvider);
const results = await hybridService.search('test query', {
const { results } = await hybridService.search('test query', {
repositoryId: repoId,
searchMode: 'semantic'
});
@@ -951,7 +951,7 @@ describe('HybridSearchService', () => {
const hybridService = new HybridSearchService(client, searchService, mockProvider);
// Query with heavy punctuation that preprocesses to nothing.
const results = await hybridService.search('!!!@@@###', {
const { results } = await hybridService.search('!!!@@@###', {
repositoryId: repoId,
searchMode: 'auto',
profileId: 'test-profile'
@@ -978,7 +978,7 @@ describe('HybridSearchService', () => {
const searchService = new SearchService(client);
const hybridService = new HybridSearchService(client, searchService, mockProvider);
const results = await hybridService.search('hello', {
const { results } = await hybridService.search('hello', {
repositoryId: repoId,
searchMode: 'auto'
});
@@ -1038,7 +1038,7 @@ describe('HybridSearchService', () => {
const hybridService = new HybridSearchService(client, searchService, mockProvider);
// Query that won't match through FTS after punctuation normalization.
const results = await hybridService.search('%%%vector%%%', {
const { results } = await hybridService.search('%%%vector%%%', {
repositoryId: repoId,
searchMode: 'hybrid',
alpha: 0.5,
@@ -1064,7 +1064,7 @@ describe('HybridSearchService', () => {
const searchService = new SearchService(client);
const hybridService = new HybridSearchService(client, searchService, null);
const results = await hybridService.search('!!!@@@###$$$', {
const { results } = await hybridService.search('!!!@@@###$$$', {
repositoryId: repoId
});

View File

@@ -101,9 +101,12 @@ export class HybridSearchService {
*
* @param query - Raw search string (preprocessing handled by SearchService).
* @param options - Search parameters including repositoryId and alpha blend.
* @returns Ranked array of SnippetSearchResult, deduplicated by snippet ID.
* @returns Object with ranked results array and the search mode actually used.
*/
async search(query: string, options: HybridSearchOptions): Promise<SnippetSearchResult[]> {
async search(
query: string,
options: HybridSearchOptions
): Promise<{ results: SnippetSearchResult[]; searchModeUsed: string }> {
const limit = options.limit ?? 20;
const mode = options.searchMode ?? 'auto';
@@ -127,12 +130,12 @@ export class HybridSearchService {
// Semantic mode: skip FTS entirely and use vector search only.
if (mode === 'semantic') {
if (!this.embeddingProvider || !query.trim()) {
return [];
return { results: [], searchModeUsed: 'semantic' };
}
const embeddings = await this.embeddingProvider.embed([query]);
if (embeddings.length === 0) {
return [];
return { results: [], searchModeUsed: 'semantic' };
}
const queryEmbedding = embeddings[0].values;
@@ -144,7 +147,10 @@ export class HybridSearchService {
});
const topIds = vectorResults.slice(0, limit).map((r) => r.snippetId);
return this.fetchSnippetsByIds(topIds, options.repositoryId, options.type);
return {
results: this.fetchSnippetsByIds(topIds, options.repositoryId, options.type),
searchModeUsed: 'semantic'
};
}
// FTS5 mode (keyword) or hybrid/auto modes: try FTS first.
@@ -157,7 +163,7 @@ export class HybridSearchService {
// Degenerate cases: no provider or pure FTS5 mode.
if (!this.embeddingProvider || alpha === 0) {
return ftsResults.slice(0, limit);
return { results: ftsResults.slice(0, limit), searchModeUsed: 'keyword' };
}
// For auto/hybrid modes: if FTS yielded results, use them; otherwise try vector.
@@ -168,14 +174,14 @@ export class HybridSearchService {
// No FTS results: try vector search as a fallback in auto/hybrid modes.
if (!query.trim()) {
// Query is empty; no point embedding it.
return [];
return { results: [], searchModeUsed: 'keyword_fallback' };
}
const embeddings = await this.embeddingProvider.embed([query]);
// If provider fails (Noop returns empty array), we're done.
if (embeddings.length === 0) {
return [];
return { results: [], searchModeUsed: 'keyword_fallback' };
}
const queryEmbedding = embeddings[0].values;
@@ -187,7 +193,10 @@ export class HybridSearchService {
});
const topIds = vectorResults.slice(0, limit).map((r) => r.snippetId);
return this.fetchSnippetsByIds(topIds, options.repositoryId, options.type);
return {
results: this.fetchSnippetsByIds(topIds, options.repositoryId, options.type),
searchModeUsed: 'keyword_fallback'
};
}
// FTS has results: use RRF to blend with vector search (if alpha < 1).
@@ -195,7 +204,7 @@ export class HybridSearchService {
// Provider may be a Noop (returns empty array) — fall back to FTS gracefully.
if (embeddings.length === 0) {
return ftsResults.slice(0, limit);
return { results: ftsResults.slice(0, limit), searchModeUsed: 'keyword' };
}
const queryEmbedding = embeddings[0].values;
@@ -210,7 +219,10 @@ export class HybridSearchService {
// Pure vector mode: skip RRF and return vector results directly.
if (alpha === 1) {
const topIds = vectorResults.slice(0, limit).map((r) => r.snippetId);
return this.fetchSnippetsByIds(topIds, options.repositoryId, options.type);
return {
results: this.fetchSnippetsByIds(topIds, options.repositoryId, options.type),
searchModeUsed: 'semantic'
};
}
// Build ranked lists for RRF. Score field is unused by RRF — only
@@ -221,7 +233,10 @@ export class HybridSearchService {
const fused = reciprocalRankFusion(ftsRanked, vecRanked);
const topIds = fused.slice(0, limit).map((r) => r.id);
return this.fetchSnippetsByIds(topIds, options.repositoryId, options.type);
return {
results: this.fetchSnippetsByIds(topIds, options.repositoryId, options.type),
searchModeUsed: 'hybrid'
};
}
// -------------------------------------------------------------------------

View File

@@ -0,0 +1,131 @@
import type Database from 'better-sqlite3';
import type { EmbeddingSettingsUpdateDto } from '$lib/dtos/embedding-settings.js';
import { createProviderFromProfile, getDefaultLocalProfile } from '$lib/server/embeddings/registry.js';
import { EmbeddingProfileMapper } from '$lib/server/mappers/embedding-profile.mapper.js';
import { EmbeddingProfile, EmbeddingProfileEntity } from '$lib/server/models/embedding-profile.js';
import { EmbeddingSettings } from '$lib/server/models/embedding-settings.js';
import { InvalidInputError } from '$lib/server/utils/validation.js';
/**
 * Manages embedding provider profiles: reading the current settings and
 * switching / disabling the active profile. Exactly one profile may carry
 * the `is_default` flag at a time.
 */
export class EmbeddingSettingsService {
  constructor(private readonly db: Database.Database) {}

  /**
   * Returns all stored profiles plus the active one — the single profile
   * that is both `isDefault` and `enabled` — or null when embeddings are
   * disabled.
   */
  getSettings(): EmbeddingSettings {
    const profiles = this.loadProfiles();
    const activeProfile = profiles.find((profile) => profile.isDefault && profile.enabled) ?? null;
    return new EmbeddingSettings({ profiles, activeProfile });
  }

  /**
   * Activates the requested profile, or disables embeddings entirely when
   * `activeProfileId` is null, and returns the resulting settings.
   *
   * Fix vs. the previous implementation: provider availability is verified
   * BEFORE any `is_default` flags are cleared. The old code cleared the
   * flags first, so a failed connectivity check threw while silently
   * leaving the system with no active profile.
   *
   * @throws InvalidInputError when the profile payload is invalid or the
   *         provider cannot be reached.
   */
  async updateSettings(input: EmbeddingSettingsUpdateDto): Promise<EmbeddingSettings> {
    const now = Math.floor(Date.now() / 1000);

    if (input.activeProfileId === null) {
      // Explicit "disable embeddings": clear every default flag and stop.
      this.clearDefaultFlags(now);
      return this.getSettings();
    }

    const profile =
      input.activeProfileId === 'local-default'
        ? this.buildDefaultLocalProfile(now)
        : this.buildCustomProfile(input, now);

    // Validate connectivity before mutating state so the previously active
    // profile survives a failed switch.
    const available = await createProviderFromProfile(profile).isAvailable();
    if (!available) {
      throw new InvalidInputError(
        `Could not connect to the "${profile.providerKind}" provider. Check your configuration.`
      );
    }

    this.clearDefaultFlags(now);
    this.persistProfile(profile);
    return this.getSettings();
  }

  // Clears `is_default` on every profile row; persistProfile() then sets it
  // on the newly-active profile only.
  private clearDefaultFlags(now: number): void {
    this.db.prepare('UPDATE embedding_profiles SET is_default = 0, updated_at = ?').run(now);
  }

  // Loads all profiles, the default first, then oldest-first.
  private loadProfiles(): EmbeddingProfile[] {
    return this.db
      .prepare('SELECT * FROM embedding_profiles ORDER BY is_default DESC, created_at ASC')
      .all()
      .map((row) => EmbeddingProfileMapper.fromEntity(new EmbeddingProfileEntity(row as never)));
  }

  // Materializes the built-in local provider as an enabled default profile.
  private buildDefaultLocalProfile(now: number): EmbeddingProfile {
    const defaultLocal = getDefaultLocalProfile();
    return new EmbeddingProfile({
      id: defaultLocal.id,
      providerKind: defaultLocal.providerKind,
      title: defaultLocal.title,
      enabled: true,
      isDefault: true,
      model: defaultLocal.model,
      dimensions: defaultLocal.dimensions,
      config: {},
      createdAt: this.getCreatedAt(defaultLocal.id, now),
      updatedAt: now
    });
  }

  // Validates and materializes a user-supplied profile payload.
  // @throws InvalidInputError when required fields are missing or IDs mismatch.
  private buildCustomProfile(input: EmbeddingSettingsUpdateDto, now: number): EmbeddingProfile {
    const candidate = input.profile;
    if (!candidate) {
      throw new InvalidInputError('profile is required for custom embedding providers');
    }
    if (candidate.id !== input.activeProfileId) {
      throw new InvalidInputError('activeProfileId must match profile.id');
    }
    if (!candidate.title || !candidate.model) {
      throw new InvalidInputError('profile title and model are required');
    }
    return new EmbeddingProfile({
      id: candidate.id,
      providerKind: candidate.providerKind,
      title: candidate.title,
      enabled: true,
      isDefault: true,
      model: candidate.model,
      dimensions: candidate.dimensions,
      config: candidate.config,
      createdAt: this.getCreatedAt(candidate.id, now),
      updatedAt: now
    });
  }

  // Preserves the original created_at when re-saving an existing profile;
  // falls back to `fallback` for a brand-new profile id.
  private getCreatedAt(id: string, fallback: number): number {
    return (
      this.db
        .prepare<[string], { created_at: number }>('SELECT created_at FROM embedding_profiles WHERE id = ?')
        .get(id)?.created_at ?? fallback
    );
  }

  // Inserts or updates the profile row (upsert keyed on id). Booleans are
  // stored as 0/1 and `config` as a JSON string.
  private persistProfile(profile: EmbeddingProfile): void {
    this.db
      .prepare(
        `INSERT INTO embedding_profiles
(id, provider_kind, title, enabled, is_default, model, dimensions, config, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
provider_kind = excluded.provider_kind,
title = excluded.title,
enabled = excluded.enabled,
is_default = excluded.is_default,
model = excluded.model,
dimensions = excluded.dimensions,
config = excluded.config,
updated_at = excluded.updated_at`
      )
      .run(
        profile.id,
        profile.providerKind,
        profile.title,
        profile.enabled ? 1 : 0,
        profile.isDefault ? 1 : 0,
        profile.model,
        profile.dimensions,
        JSON.stringify(profile.config),
        profile.createdAt,
        profile.updatedAt
      );
  }
}

View File

@@ -27,16 +27,20 @@ function createTestDb(): Database.Database {
client.pragma('foreign_keys = ON');
const migrationsFolder = join(import.meta.dirname, '../db/migrations');
const migrationSql = readFileSync(join(migrationsFolder, '0000_large_master_chief.sql'), 'utf-8');
// Drizzle migration files use `--> statement-breakpoint` as separator.
const statements = migrationSql
for (const migration of [
'0000_large_master_chief.sql',
'0001_quick_nighthawk.sql',
'0002_silky_stellaris.sql'
]) {
const statements = readFileSync(join(migrationsFolder, migration), 'utf-8')
.split('--> statement-breakpoint')
.map((s) => s.trim())
.map((statement) => statement.trim())
.filter(Boolean);
for (const stmt of statements) {
client.exec(stmt);
for (const statement of statements) {
client.exec(statement);
}
}
return client;
@@ -408,6 +412,83 @@ describe('RepositoryService.getVersions()', () => {
});
});
// ---------------------------------------------------------------------------
// getIndexSummary()
// ---------------------------------------------------------------------------
describe('RepositoryService.getIndexSummary()', () => {
let client: Database.Database;
let service: RepositoryService;
beforeEach(() => {
client = createTestDb();
service = makeService(client);
service.add({ source: 'github', sourceUrl: 'https://github.com/facebook/react', branch: 'main' });
});
// Seeds one base-branch document/snippet/embedding plus one version-scoped
// set, then verifies the summary aggregates both: embeddings are counted
// across scopes, and 'main' represents the base branch in the version list.
it('returns embedding counts and indexed version labels', () => {
const now = Math.floor(Date.now() / 1000);
const docId = crypto.randomUUID();
const versionDocId = crypto.randomUUID();
const snippetId = crypto.randomUUID();
const versionSnippetId = crypto.randomUUID();
// Indexed version row for the repo.
client
.prepare(
`INSERT INTO repository_versions (id, repository_id, tag, state, created_at)
VALUES (?, '/facebook/react', ?, 'indexed', ?)`
)
.run('/facebook/react/v18.3.0', 'v18.3.0', now);
// Base-branch document (version_id NULL).
client
.prepare(
`INSERT INTO documents (id, repository_id, version_id, file_path, checksum, indexed_at)
VALUES (?, '/facebook/react', NULL, 'README.md', 'base', ?)`
)
.run(docId, now);
// Version-scoped document.
client
.prepare(
`INSERT INTO documents (id, repository_id, version_id, file_path, checksum, indexed_at)
VALUES (?, '/facebook/react', ?, 'README.md', 'version', ?)`
)
.run(versionDocId, '/facebook/react/v18.3.0', now);
client
.prepare(
`INSERT INTO snippets (id, document_id, repository_id, version_id, type, content, created_at)
VALUES (?, ?, '/facebook/react', NULL, 'info', 'base snippet', ?)`
)
.run(snippetId, docId, now);
client
.prepare(
`INSERT INTO snippets (id, document_id, repository_id, version_id, type, content, created_at)
VALUES (?, ?, '/facebook/react', ?, 'info', 'version snippet', ?)`
)
.run(versionSnippetId, versionDocId, '/facebook/react/v18.3.0', now);
// One embedding per snippet (2-dim vectors stored as raw Float32 bytes).
client
.prepare(
`INSERT INTO snippet_embeddings (snippet_id, profile_id, model, dimensions, embedding, created_at)
VALUES (?, 'local-default', 'Xenova/all-MiniLM-L6-v2', 2, ?, ?)`
)
.run(snippetId, Buffer.from(Float32Array.from([1, 0]).buffer), now);
client
.prepare(
`INSERT INTO snippet_embeddings (snippet_id, profile_id, model, dimensions, embedding, created_at)
VALUES (?, 'local-default', 'Xenova/all-MiniLM-L6-v2', 2, ?, ?)`
)
.run(versionSnippetId, Buffer.from(Float32Array.from([0, 1]).buffer), now);
expect(service.getIndexSummary('/facebook/react')).toEqual({
embeddingCount: 2,
indexedVersions: ['main', 'v18.3.0']
});
});
});
// ---------------------------------------------------------------------------
// createIndexingJob()
// ---------------------------------------------------------------------------
@@ -448,4 +529,24 @@ describe('RepositoryService.createIndexingJob()', () => {
const job = service.createIndexingJob('/facebook/react', '/facebook/react/v18.3.0');
expect(job.versionId).toBe('/facebook/react/v18.3.0');
});
// A versioned job must not be deduped against the default-branch job of the same repo.
it('allows separate jobs for the same repo but different versions', () => {
const defaultJob = service.createIndexingJob('/facebook/react');
const versionJob = service.createIndexingJob('/facebook/react', '/facebook/react/v18.3.0');
expect(versionJob.id).not.toBe(defaultJob.id);
expect(defaultJob.versionId).toBeNull();
expect(versionJob.versionId).toBe('/facebook/react/v18.3.0');
});
// Re-requesting the same (repo, version) pair while a job is still queued is idempotent.
it('returns the existing job when the same (repo, version) pair is already queued', () => {
const job1 = service.createIndexingJob('/facebook/react', '/facebook/react/v18.3.0');
const job2 = service.createIndexingJob('/facebook/react', '/facebook/react/v18.3.0');
expect(job2.id).toBe(job1.id);
});
// The default-branch case (versionId omitted) is deduped the same way.
it('returns the existing default-branch job when called again without a versionId', () => {
const job1 = service.createIndexingJob('/facebook/react');
const job2 = service.createIndexingJob('/facebook/react');
expect(job2.id).toBe(job1.id);
});
});

View File

@@ -39,6 +39,11 @@ export interface RepositoryStats {
lastIndexedAt: Date | null;
}
/**
 * Aggregate indexing summary for one repository: total snippet embeddings
 * stored for the repo (all branches/versions combined) plus the labels of
 * every indexed version, default-branch label first.
 */
export interface RepositoryIndexSummary {
embeddingCount: number;
indexedVersions: string[];
}
export class RepositoryService {
constructor(private readonly db: Database.Database) {}
@@ -266,19 +271,65 @@ export class RepositoryService {
return rows.map((r) => r.tag);
}
/**
 * Compute the index summary for a repository.
 *
 * @param repositoryId - Repository id (may contain slashes, e.g. '/facebook/react').
 * @returns embeddingCount: embeddings across all snippets of the repo (any version);
 *          indexedVersions: default-branch label first (when default-branch documents
 *          exist), then indexed version tags newest-first, deduplicated.
 * @throws NotFoundError when the repository does not exist.
 */
getIndexSummary(repositoryId: string): RepositoryIndexSummary {
const repository = this.get(repositoryId);
if (!repository) throw new NotFoundError(`Repository ${repositoryId} not found`);
// Count every embedding whose snippet belongs to this repo, regardless of version.
const embeddingRow = this.db
.prepare(
`SELECT COUNT(*) AS count
FROM snippet_embeddings se
INNER JOIN snippets s ON s.id = se.snippet_id
WHERE s.repository_id = ?`
)
.get(repositoryId) as { count: number };
// Tags of versions that have finished indexing, newest first.
const versionRows = this.db
.prepare(
`SELECT tag FROM repository_versions
WHERE repository_id = ? AND state = 'indexed'
ORDER BY created_at DESC`
)
.all(repositoryId) as { tag: string }[];
// Any document with a NULL version_id means the default branch has been indexed.
const hasDefaultBranchIndex = Boolean(
this.db
.prepare(
`SELECT 1 AS found
FROM documents
WHERE repository_id = ? AND version_id IS NULL
LIMIT 1`
)
.get(repositoryId)
);
// Branch label first, then version tags; the Set dedupes e.g. a tag equal to the branch.
const indexedVersions = [
...(hasDefaultBranchIndex ? [repository.branch ?? 'default branch'] : []),
...versionRows.map((row) => row.tag)
];
return {
embeddingCount: embeddingRow.count,
indexedVersions: Array.from(new Set(indexedVersions))
};
}
/**
* Create an indexing job for a repository.
* If a job is already running, returns the existing job.
*/
createIndexingJob(repositoryId: string, versionId?: string): IndexingJob {
// Check for running job
// Check for an existing queued/running job for this exact (repo, version) pair.
const resolvedVersionId = versionId ?? null;
const runningJob = this.db
.prepare(
`SELECT * FROM indexing_jobs
WHERE repository_id = ? AND status IN ('queued', 'running')
WHERE repository_id = ?
AND (version_id = ? OR (version_id IS NULL AND ? IS NULL))
AND status IN ('queued', 'running')
ORDER BY created_at DESC LIMIT 1`
)
.get(repositoryId) as IndexingJobEntity | undefined;
.get(repositoryId, resolvedVersionId, resolvedVersionId) as IndexingJobEntity | undefined;
if (runningJob) return IndexingJobMapper.fromEntity(new IndexingJobEntity(runningJob));

View File

@@ -32,6 +32,7 @@ export type RepositoryState = 'pending' | 'indexing' | 'indexed' | 'error';
export type SnippetType = 'code' | 'info';
export type JobStatus = 'queued' | 'running' | 'done' | 'failed';
export type VersionState = 'pending' | 'indexing' | 'indexed' | 'error';
export type EmbeddingProviderKind = 'local-transformers' | 'openai-compatible';
// ---------------------------------------------------------------------------
// API / service layer types

View File

@@ -34,11 +34,13 @@ vi.mock('$lib/server/embeddings/registry.js', () => ({
}));
import { POST as postLibraries } from './libs/+server.js';
import { GET as getLibraries } from './libs/+server.js';
import { GET as getLibrary } from './libs/[id]/+server.js';
import { GET as getJobs } from './jobs/+server.js';
import { GET as getJob } from './jobs/[id]/+server.js';
import { GET as getVersions, POST as postVersions } from './libs/[id]/versions/+server.js';
import { GET as getContext } from './context/+server.js';
import { DEFAULT_TOKEN_BUDGET } from '$lib/server/api/token-budget.js';
const NOW_S = Math.floor(Date.now() / 1000);
@@ -53,6 +55,7 @@ function createTestDb(): Database.Database {
const migration0 = readFileSync(join(migrationsFolder, '0000_large_master_chief.sql'), 'utf-8');
const migration1 = readFileSync(join(migrationsFolder, '0001_quick_nighthawk.sql'), 'utf-8');
const migration2 = readFileSync(join(migrationsFolder, '0002_silky_stellaris.sql'), 'utf-8');
const migration3 = readFileSync(join(migrationsFolder, '0003_multiversion_config.sql'), 'utf-8');
// Apply first migration
const statements0 = migration0
@@ -83,6 +86,15 @@ function createTestDb(): Database.Database {
client.exec(statement);
}
const statements3 = migration3
.split('--> statement-breakpoint')
.map((statement) => statement.trim())
.filter(Boolean);
for (const statement of statements3) {
client.exec(statement);
}
client.exec(readFileSync(ftsFile, 'utf-8'));
return client;
@@ -185,6 +197,16 @@ function seedSnippet(
return snippetId;
}
/**
 * Seed one snippet_embeddings row for `snippetId` under the default local
 * profile/model, storing `values` as a raw little-endian Float32 blob.
 */
function seedEmbedding(client: Database.Database, snippetId: string, values: number[]): void {
  const embeddingBlob = Buffer.from(Float32Array.from(values).buffer);
  const insert = client.prepare(
    `INSERT INTO snippet_embeddings
(snippet_id, profile_id, model, dimensions, embedding, created_at)
VALUES (?, 'local-default', 'Xenova/all-MiniLM-L6-v2', ?, ?, ?)`
  );
  insert.run(snippetId, values.length, embeddingBlob, NOW_S);
}
function seedRules(client: Database.Database, repositoryId: string, rules: string[]) {
client
.prepare(
@@ -248,6 +270,36 @@ describe('API contract integration', () => {
expect(body).not.toHaveProperty('total_snippets');
});
// The libs list endpoint should expose the per-repo index summary
// (embeddingCount + indexedVersions) alongside the repository DTO.
it('GET /api/v1/libs includes embedding counts and indexed versions per repository', async () => {
const repositoryId = seedRepo(db);
const versionId = seedVersion(db, repositoryId, 'v18.3.0');
const baseDocId = seedDocument(db, repositoryId);
const versionDocId = seedDocument(db, repositoryId, versionId);
const baseSnippetId = seedSnippet(db, {
documentId: baseDocId,
repositoryId,
content: 'Base branch snippet'
});
const versionSnippetId = seedSnippet(db, {
documentId: versionDocId,
repositoryId,
versionId,
content: 'Versioned snippet'
});
// One embedding on the default branch and one on v18.3.0 — the summary counts both.
seedEmbedding(db, baseSnippetId, [1, 0]);
seedEmbedding(db, versionSnippetId, [0, 1]);
const response = await getLibraries({
url: new URL('http://test/api/v1/libs')
} as never);
expect(response.status).toBe(200);
const body = await response.json();
expect(body.libraries).toHaveLength(1);
expect(body.libraries[0].embeddingCount).toBe(2);
// Default-branch label first ('main', from seedRepo), then the indexed tag.
expect(body.libraries[0].indexedVersions).toEqual(['main', 'v18.3.0']);
});
it('GET /api/v1/jobs and /api/v1/jobs/:id return job DTOs in camelCase', async () => {
const repoService = new RepositoryService(db);
repoService.add({ source: 'github', sourceUrl: 'https://github.com/facebook/react' });
@@ -306,6 +358,37 @@ describe('API contract integration', () => {
expect(getBody.versions[0]).not.toHaveProperty('total_snippets');
});
// Registering two different versions with autoIndex must enqueue two distinct
// jobs, each bound to its own version id — not reuse one repo-level job.
it('POST /api/v1/libs/:id/versions creates distinct jobs for different versions of the same repo', async () => {
const repoService = new RepositoryService(db);
repoService.add({ source: 'github', sourceUrl: 'https://github.com/facebook/react' });
// Slash-bearing repo ids travel percent-encoded in the path segment; handlers decode them.
const postV1 = await postVersions({
params: { id: encodeURIComponent('/facebook/react') },
request: new Request('http://test/api/v1/libs/%2Ffacebook%2Freact/versions', {
method: 'POST',
headers: { 'content-type': 'application/json' },
body: JSON.stringify({ tag: 'v18.3.0', autoIndex: true })
})
} as never);
const bodyV1 = await postV1.json();
const postV2 = await postVersions({
params: { id: encodeURIComponent('/facebook/react') },
request: new Request('http://test/api/v1/libs/%2Ffacebook%2Freact/versions', {
method: 'POST',
headers: { 'content-type': 'application/json' },
body: JSON.stringify({ tag: 'v17.0.2', autoIndex: true })
})
} as never);
const bodyV2 = await postV2.json();
expect(postV1.status).toBe(201);
expect(postV2.status).toBe(201);
// Distinct jobs, each carrying its own version id.
expect(bodyV1.job.id).not.toBe(bodyV2.job.id);
expect(bodyV1.job.versionId).toBe('/facebook/react/v18.3.0');
expect(bodyV2.job.versionId).toBe('/facebook/react/v17.0.2');
});
it('GET /api/v1/context returns informative txt output for empty results', async () => {
const repositoryId = seedRepo(db);
@@ -325,6 +408,40 @@ describe('API contract integration', () => {
expect(body).toContain('Result count: 0');
});
// Default JSON responses for the UI must include every result; the token
// budget applies only to txt responses or when ?tokens= is passed explicitly.
it('GET /api/v1/context does not token-filter default JSON responses for the UI', async () => {
const repositoryId = seedRepo(db);
const documentId = seedDocument(db, repositoryId);
// This snippet alone exceeds the default budget — it must still be returned.
seedSnippet(db, {
documentId,
repositoryId,
type: 'info',
title: 'Large result',
content: 'Large result body',
tokenCount: DEFAULT_TOKEN_BUDGET + 1
});
seedSnippet(db, {
documentId,
repositoryId,
type: 'info',
title: 'Small result',
content: 'Small result body',
tokenCount: 5
});
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(repositoryId)}&query=${encodeURIComponent('result')}`
)
} as never);
expect(response.status).toBe(200);
const body = await response.json();
// Both snippets survive, proving no budget trimming happened.
expect(body.snippets).toHaveLength(2);
expect(body.resultCount).toBe(2);
});
it('GET /api/v1/context returns additive repository and version metadata for versioned results', async () => {
const repositoryId = seedRepo(db);
const versionId = seedVersion(db, repositoryId, 'v18.3.0');
@@ -379,4 +496,172 @@ describe('API contract integration', () => {
isLocal: false
});
});
// Repo-wide (version_id NULL) and version-specific rules are merged, repo-wide first.
it('GET /api/v1/context returns merged repo-wide and version-specific rules', async () => {
const repositoryId = seedRepo(db);
const versionId = seedVersion(db, repositoryId, 'v2.0.0');
const documentId = seedDocument(db, repositoryId, versionId);
// Insert repo-wide rules (version_id IS NULL).
db.prepare(
`INSERT INTO repository_configs (repository_id, version_id, rules, updated_at)
VALUES (?, NULL, ?, ?)`
).run(repositoryId, JSON.stringify(['Repo-wide rule']), NOW_S);
// Insert version-specific rules.
db.prepare(
`INSERT INTO repository_configs (repository_id, version_id, rules, updated_at)
VALUES (?, ?, ?, ?)`
).run(repositoryId, versionId, JSON.stringify(['Version-specific rule']), NOW_S);
seedSnippet(db, {
documentId,
repositoryId,
versionId,
content: 'some versioned content'
});
// A version is addressed as '<repoId>/<tag>' inside the libraryId query param.
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(`${repositoryId}/v2.0.0`)}&query=${encodeURIComponent('versioned content')}`
)
} as never);
expect(response.status).toBe(200);
const body = await response.json();
// Both repo-wide and version-specific rules should appear (deduped).
expect(body.rules).toEqual(['Repo-wide rule', 'Version-specific rule']);
});
// Without a version suffix only the repo-wide config applies.
it('GET /api/v1/context returns only repo-wide rules when no version is requested', async () => {
const repositoryId = seedRepo(db);
const documentId = seedDocument(db, repositoryId);
// Insert repo-wide rules (version_id IS NULL).
db.prepare(
`INSERT INTO repository_configs (repository_id, version_id, rules, updated_at)
VALUES (?, NULL, ?, ?)`
).run(repositoryId, JSON.stringify(['Repo-wide rule only']), NOW_S);
seedSnippet(db, { documentId, repositoryId, content: 'some content' });
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(repositoryId)}&query=${encodeURIComponent('some content')}`
)
} as never);
expect(response.status).toBe(200);
const body = await response.json();
expect(body.rules).toEqual(['Repo-wide rule only']);
});
// A rule present in both configs must not be duplicated in the merged output.
it('GET /api/v1/context deduplicates rules that appear in both repo-wide and version config', async () => {
const repositoryId = seedRepo(db);
const versionId = seedVersion(db, repositoryId, 'v3.0.0');
const documentId = seedDocument(db, repositoryId, versionId);
const sharedRule = 'Use TypeScript strict mode';
db.prepare(
`INSERT INTO repository_configs (repository_id, version_id, rules, updated_at)
VALUES (?, NULL, ?, ?)`
).run(repositoryId, JSON.stringify([sharedRule]), NOW_S);
db.prepare(
`INSERT INTO repository_configs (repository_id, version_id, rules, updated_at)
VALUES (?, ?, ?, ?)`
).run(repositoryId, versionId, JSON.stringify([sharedRule, 'Version-only rule']), NOW_S);
seedSnippet(db, { documentId, repositoryId, versionId, content: 'dedup test content' });
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(`${repositoryId}/v3.0.0`)}&query=${encodeURIComponent('dedup test')}`
)
} as never);
expect(response.status).toBe(200);
const body = await response.json();
// sharedRule appears once, version-only rule appended.
expect(body.rules).toEqual([sharedRule, 'Version-only rule']);
});
// Unknown versions are a hard 404 with a machine-readable error code.
it('GET /api/v1/context returns 404 with VERSION_NOT_FOUND when version does not exist', async () => {
const repositoryId = seedRepo(db);
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(`${repositoryId}/v99.0.0`)}&query=${encodeURIComponent('foo')}`
)
} as never);
expect(response.status).toBe(404);
const body = await response.json();
expect(body.code).toBe('VERSION_NOT_FOUND');
});
// The version segment may also be a commit SHA; a full 40-char SHA resolves to its tag.
it('GET /api/v1/context resolves a version by full commit SHA', async () => {
const repositoryId = seedRepo(db);
const fullSha = 'a'.repeat(40);
// Insert version with a commit_hash
db.prepare(
`INSERT INTO repository_versions
(id, repository_id, tag, commit_hash, state, total_snippets, indexed_at, created_at)
VALUES (?, ?, ?, ?, 'indexed', 0, ?, ?)`
).run(`${repositoryId}/v2.0.0`, repositoryId, 'v2.0.0', fullSha, NOW_S, NOW_S);
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(`${repositoryId}/${fullSha}`)}&query=${encodeURIComponent('anything')}`
)
} as never);
expect(response.status).toBe(200);
const body = await response.json();
expect(body.version?.resolved).toBe('v2.0.0');
});
// Short SHA prefixes (handler accepts >= 7 chars; 8 used here) resolve via prefix match.
it('GET /api/v1/context resolves a version by short SHA prefix (8 chars)', async () => {
const repositoryId = seedRepo(db);
const fullSha = 'b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5e6f7a8b9c0';
const shortSha = fullSha.slice(0, 8);
db.prepare(
`INSERT INTO repository_versions
(id, repository_id, tag, commit_hash, state, total_snippets, indexed_at, created_at)
VALUES (?, ?, ?, ?, 'indexed', 0, ?, ?)`
).run(`${repositoryId}/v3.0.0`, repositoryId, 'v3.0.0', fullSha, NOW_S, NOW_S);
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(`${repositoryId}/${shortSha}`)}&query=${encodeURIComponent('anything')}`
)
} as never);
expect(response.status).toBe(200);
const body = await response.json();
expect(body.version?.resolved).toBe('v3.0.0');
});
// The JSON payload reports which search strategy actually served the request.
it('GET /api/v1/context includes searchModeUsed in JSON response', async () => {
const repositoryId = seedRepo(db);
const documentId = seedDocument(db, repositoryId);
seedSnippet(db, {
documentId,
repositoryId,
content: 'search mode used test snippet'
});
const response = await getContext({
url: new URL(
`http://test/api/v1/context?libraryId=${encodeURIComponent(repositoryId)}&query=${encodeURIComponent('search mode used')}`
)
} as never);
expect(response.status).toBe(200);
const body = await response.json();
expect(body.searchModeUsed).toBeDefined();
expect(['keyword', 'semantic', 'hybrid', 'keyword_fallback']).toContain(body.searchModeUsed);
});
});

View File

@@ -17,7 +17,11 @@ import { dtoJsonResponse } from '$lib/server/api/dto-response';
import { SearchService } from '$lib/server/search/search.service';
import { HybridSearchService } from '$lib/server/search/hybrid.search.service';
import { createProviderFromProfile } from '$lib/server/embeddings/registry';
import type { EmbeddingProfile } from '$lib/server/db/schema';
import {
EmbeddingProfileEntity,
type EmbeddingProfileEntityProps
} from '$lib/server/models/embedding-profile';
import { EmbeddingProfileMapper } from '$lib/server/mappers/embedding-profile.mapper';
import { parseLibraryId } from '$lib/server/api/library-id';
import { selectSnippetsWithinBudget, DEFAULT_TOKEN_BUDGET } from '$lib/server/api/token-budget';
import { formatContextJson, formatContextTxt, CORS_HEADERS } from '$lib/server/api/formatters';
@@ -32,40 +36,70 @@ function getServices(db: ReturnType<typeof getClient>) {
// Load the active embedding profile from the database
const profileRow = db
.prepare<
[],
EmbeddingProfile
>('SELECT * FROM embedding_profiles WHERE is_default = 1 AND enabled = 1 LIMIT 1')
.prepare<[], EmbeddingProfileEntityProps>(
'SELECT * FROM embedding_profiles WHERE is_default = 1 AND enabled = 1 LIMIT 1'
)
.get();
const provider = profileRow ? createProviderFromProfile(profileRow) : null;
const profile = profileRow
? EmbeddingProfileMapper.fromEntity(new EmbeddingProfileEntity(profileRow))
: null;
const provider = profile ? createProviderFromProfile(profile) : null;
const hybridService = new HybridSearchService(db, searchService, provider);
return { db, searchService, hybridService, profileId: profileRow?.id };
return { db, searchService, hybridService, profileId: profile?.id };
}
/** Shape of a repository_configs row as read here: `rules` is JSON text or NULL. */
interface RawRepoConfig {
rules: string | null;
}
function getRules(db: ReturnType<typeof getClient>, repositoryId: string): string[] {
const row = db
.prepare<
[string],
RawRepoConfig
>(`SELECT rules FROM repository_configs WHERE repository_id = ?`)
.get(repositoryId);
if (!row?.rules) return [];
/**
 * Parse a JSON-encoded `rules` column into a string array.
 *
 * Fix: the body contained a stale leftover line `const parsed =
 * JSON.parse(row.rules);` (the pre-refactor statement) which redeclared
 * `parsed` and referenced an undefined `row`; only the `raw`-based parse
 * belongs here.
 *
 * @param raw - Raw column value (JSON text), or null/undefined when unset.
 * @returns The parsed array; [] when raw is empty, malformed, or not an array.
 */
function parseRulesJson(raw: string | null | undefined): string[] {
  if (!raw) return [];
  try {
    const parsed = JSON.parse(raw);
    return Array.isArray(parsed) ? (parsed as string[]) : [];
  } catch {
    return [];
  }
}
/**
 * Load the effective rules for a repository: repo-wide rules (version_id IS
 * NULL) first, then any version-specific rules appended with duplicates of
 * repo-wide entries removed. Order is preserved.
 */
function getRules(
  db: ReturnType<typeof getClient>,
  repositoryId: string,
  versionId?: string
): string[] {
  const repoRow = db
    .prepare<
      [string],
      RawRepoConfig
    >(`SELECT rules FROM repository_configs WHERE repository_id = ? AND version_id IS NULL`)
    .get(repositoryId);
  const repoRules = parseRulesJson(repoRow?.rules);

  // No version requested — repo-wide config is the whole answer.
  if (!versionId) return repoRules;

  const versionRow = db
    .prepare<
      [string, string],
      RawRepoConfig
    >(`SELECT rules FROM repository_configs WHERE repository_id = ? AND version_id = ?`)
    .get(repositoryId, versionId);

  // Append version rules that are not already present repo-wide.
  const extras = parseRulesJson(versionRow?.rules).filter((rule) => !repoRules.includes(rule));
  return [...repoRules, ...extras];
}
interface RawRepoState {
state: 'pending' | 'indexing' | 'indexed' | 'error';
id: string;
@@ -124,6 +158,7 @@ export const GET: RequestHandler = async ({ url }) => {
}
const responseType = url.searchParams.get('type') ?? 'json';
const applyTokenBudget = responseType === 'txt' || url.searchParams.has('tokens');
const tokensRaw = parseInt(url.searchParams.get('tokens') ?? String(DEFAULT_TOKEN_BUDGET), 10);
const maxTokens = isNaN(tokensRaw) || tokensRaw < 1 ? DEFAULT_TOKEN_BUDGET : tokensRaw;
@@ -191,6 +226,7 @@ export const GET: RequestHandler = async ({ url }) => {
let versionId: string | undefined;
let resolvedVersion: RawVersionRow | undefined;
if (parsed.version) {
// Try exact tag match first.
resolvedVersion = db
.prepare<
[string, string],
@@ -198,12 +234,33 @@ export const GET: RequestHandler = async ({ url }) => {
>(`SELECT id, tag FROM repository_versions WHERE repository_id = ? AND tag = ?`)
.get(parsed.repositoryId, parsed.version);
// Version not found is not fatal — fall back to default branch.
versionId = resolvedVersion?.id;
// Fall back to commit hash prefix match (min 7 chars).
if (!resolvedVersion && parsed.version.length >= 7) {
resolvedVersion = db
.prepare<
[string, string],
RawVersionRow
>(
`SELECT id, tag FROM repository_versions
WHERE repository_id = ? AND commit_hash LIKE ?`
)
.get(parsed.repositoryId, `${parsed.version}%`);
}
if (!resolvedVersion) {
return new Response(
JSON.stringify({
error: `Version ${parsed.version} not found for library ${parsed.repositoryId}`,
code: 'VERSION_NOT_FOUND'
}),
{ status: 404, headers: { 'Content-Type': 'application/json', ...CORS_HEADERS } }
);
}
versionId = resolvedVersion.id;
}
// Execute hybrid search (falls back to FTS5 when no embedding provider is set).
const searchResults = await hybridService.search(query, {
const { results: searchResults, searchModeUsed } = await hybridService.search(query, {
repositoryId: parsed.repositoryId,
versionId,
limit: 50, // fetch more than needed; token budget will trim
@@ -212,15 +269,17 @@ export const GET: RequestHandler = async ({ url }) => {
profileId
});
// Apply token budget.
const selectedResults = applyTokenBudget
? (() => {
const snippets = searchResults.map((r) => r.snippet);
const selected = selectSnippetsWithinBudget(snippets, maxTokens);
// Re-wrap selected snippets as SnippetSearchResult for formatters.
const selectedResults = selected.map((snippet) => {
return selected.map((snippet) => {
const found = searchResults.find((r) => r.snippet.id === snippet.id)!;
return found;
});
})()
: searchResults;
const snippetVersionIds = Array.from(
new Set(
@@ -233,6 +292,7 @@ export const GET: RequestHandler = async ({ url }) => {
const metadata: ContextResponseMetadata = {
localSource: repo.source === 'local',
resultCount: selectedResults.length,
searchModeUsed,
repository: {
id: repo.id,
title: repo.title,
@@ -251,8 +311,8 @@ export const GET: RequestHandler = async ({ url }) => {
snippetVersions
};
// Load rules from repository_configs.
const rules = getRules(db, parsed.repositoryId);
// Load rules from repository_configs (repo-wide + version-specific merged).
const rules = getRules(db, parsed.repositoryId, versionId);
if (responseType === 'txt') {
const text = formatContextTxt(selectedResults, rules, metadata);

View File

@@ -32,7 +32,8 @@ export const GET: RequestHandler = ({ url }) => {
const enriched = libraries.map((repo) => ({
...RepositoryMapper.toDto(repo),
versions: service.getVersions(repo.id)
versions: service.getVersions(repo.id),
...service.getIndexSummary(repo.id)
}));
return json({ libraries: enriched, total, limit, offset });

View File

@@ -23,7 +23,7 @@ export const GET: RequestHandler = ({ params }) => {
return json({ error: 'Repository not found', code: 'NOT_FOUND' }, { status: 404 });
}
const versions = service.getVersions(id);
return json({ ...RepositoryMapper.toDto(repo), versions });
return json({ ...RepositoryMapper.toDto(repo), versions, ...service.getIndexSummary(id) });
} catch (err) {
return handleServiceError(err);
}

View File

@@ -1,17 +1,23 @@
/**
* POST /api/v1/libs/:id/index — trigger an indexing job for a repository.
*
* Also enqueues jobs for all registered versions so that re-indexing a repo
* automatically covers its secondary versions.
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { getClient } from '$lib/server/db/client';
import { IndexingJobMapper } from '$lib/server/mappers/indexing-job.mapper.js';
import { RepositoryService } from '$lib/server/services/repository.service';
import { VersionService } from '$lib/server/services/version.service';
import { getQueue } from '$lib/server/pipeline/startup';
import { handleServiceError, NotFoundError } from '$lib/server/utils/validation';
export const POST: RequestHandler = async ({ params, request }) => {
try {
const service = new RepositoryService(getClient());
const db = getClient();
const service = new RepositoryService(db);
const versionService = new VersionService(db);
const id = decodeURIComponent(params.id);
const repo = service.get(id);
@@ -30,7 +36,20 @@ export const POST: RequestHandler = async ({ params, request }) => {
const queue = getQueue();
const job = queue ? queue.enqueue(id, versionId) : service.createIndexingJob(id, versionId);
return json({ job: IndexingJobMapper.toDto(job) }, { status: 202 });
// Also enqueue jobs for all registered versions (dedup in queue makes this safe).
// Only when this is a default-branch re-index (no explicit versionId requested).
let versionJobs: ReturnType<typeof IndexingJobMapper.toDto>[] = [];
if (!versionId) {
const versions = versionService.list(id);
versionJobs = versions.map((version) => {
const vJob = queue
? queue.enqueue(id, version.id)
: service.createIndexingJob(id, version.id);
return IndexingJobMapper.toDto(vJob);
});
}
return json({ job: IndexingJobMapper.toDto(job), versionJobs }, { status: 202 });
} catch (err) {
return handleServiceError(err);
}

View File

@@ -0,0 +1,182 @@
/**
* Unit tests for POST /api/v1/libs/:id/index
*
* Verifies:
* - Default-branch re-index also enqueues jobs for all registered versions
* - versionJobs array is returned in the response
* - Explicit versionId request does NOT trigger extra version jobs
* - Returns 404 when repo does not exist
*/
import { beforeEach, describe, expect, it, vi } from 'vitest';
import Database from 'better-sqlite3';
import { readFileSync } from 'node:fs';
import { join } from 'node:path';
import { RepositoryService } from '$lib/server/services/repository.service';
import { VersionService } from '$lib/server/services/version.service';
// Mutable fixtures; the module mocks below close over these so each test can
// swap in a fresh database / queue stub via beforeEach.
let db: Database.Database;
let mockQueue: { enqueue: ReturnType<typeof vi.fn> } | null = null;
// Each module is mocked under both the bare and the '.js' specifier —
// NOTE(review): presumably because call sites import with either form.
vi.mock('$lib/server/db/client', () => ({
getClient: () => db
}));
vi.mock('$lib/server/db/client.js', () => ({
getClient: () => db
}));
vi.mock('$lib/server/pipeline/startup', () => ({
getQueue: () => mockQueue
}));
vi.mock('$lib/server/pipeline/startup.js', () => ({
getQueue: () => mockQueue
}));
// No embedding provider in these tests.
vi.mock('$lib/server/embeddings/registry', () => ({
createProviderFromProfile: () => null
}));
vi.mock('$lib/server/embeddings/registry.js', () => ({
createProviderFromProfile: () => null
}));
import { POST as postIndex } from './+server.js';
const NOW_S = Math.floor(Date.now() / 1000);
/**
 * Build an in-memory SQLite database with migrations 0000-0002 plus the FTS
 * schema applied, so the endpoint under test sees the real table layout.
 */
function createTestDb(): Database.Database {
  const conn = new Database(':memory:');
  conn.pragma('foreign_keys = ON');

  const dbFolder = join(import.meta.dirname, '../../../../../../lib/server/db');
  const migrationFiles = [
    '0000_large_master_chief.sql',
    '0001_quick_nighthawk.sql',
    '0002_silky_stellaris.sql'
  ];
  for (const file of migrationFiles) {
    const sql = readFileSync(join(dbFolder, 'migrations', file), 'utf-8');
    // Statements are separated by drizzle's "--> statement-breakpoint" marker.
    sql
      .split('--> statement-breakpoint')
      .map((statement) => statement.trim())
      .filter(Boolean)
      .forEach((statement) => conn.exec(statement));
  }
  conn.exec(readFileSync(join(dbFolder, 'fts.sql'), 'utf-8'));
  return conn;
}
/**
 * Build a fake queued indexing-job DTO for the given (repo, version) pair,
 * with a random id and zeroed progress counters.
 */
function makeEnqueueJob(repositoryId: string, versionId?: string) {
  const randomSuffix = Math.random().toString(36).slice(2);
  return {
    id: `job-${randomSuffix}`,
    repositoryId,
    versionId: versionId ?? null,
    status: 'queued' as const,
    processedFiles: 0,
    totalFiles: 0,
    error: null,
    startedAt: null,
    completedAt: null,
    createdAt: new Date(NOW_S * 1000)
  };
}
describe('POST /api/v1/libs/:id/index', () => {
beforeEach(() => {
// Fresh schema per test; default to "no queue" so the service fallback is used.
db = createTestDb();
mockQueue = null;
});
it('returns 404 when repo does not exist', async () => {
const response = await postIndex({
params: { id: encodeURIComponent('/nonexistent/repo') },
request: new Request('http://test', { method: 'POST' })
} as never);
expect(response.status).toBe(404);
});
// With no registered versions, only the default-branch job is created.
it('returns job and empty versionJobs when no versions are registered', async () => {
const repoService = new RepositoryService(db);
repoService.add({ source: 'github', sourceUrl: 'https://github.com/facebook/react' });
const response = await postIndex({
params: { id: encodeURIComponent('/facebook/react') },
request: new Request('http://test', { method: 'POST' })
} as never);
expect(response.status).toBe(202);
const body = await response.json();
expect(body.job).toBeDefined();
expect(body.job.repositoryId).toBe('/facebook/react');
expect(body.versionJobs).toEqual([]);
});
// A default-branch re-index fans out: one job per registered version is enqueued too.
it('enqueues jobs for all registered versions on default-branch re-index', async () => {
const repoService = new RepositoryService(db);
const versionService = new VersionService(db);
repoService.add({ source: 'github', sourceUrl: 'https://github.com/facebook/react' });
versionService.add('/facebook/react', 'v18.3.0', 'React v18.3.0');
versionService.add('/facebook/react', 'v17.0.0', 'React v17.0.0');
const enqueue = vi.fn().mockImplementation(
(repositoryId: string, versionId?: string) => makeEnqueueJob(repositoryId, versionId)
);
mockQueue = { enqueue };
const response = await postIndex({
params: { id: encodeURIComponent('/facebook/react') },
request: new Request('http://test', { method: 'POST' })
} as never);
expect(response.status).toBe(202);
const body = await response.json();
// Main job enqueued (no versionId)
expect(body.job).toBeDefined();
expect(body.job.repositoryId).toBe('/facebook/react');
// Two version jobs enqueued
expect(body.versionJobs).toHaveLength(2);
expect(enqueue).toHaveBeenCalledTimes(3); // 1 main + 2 versions
// Version IDs should be the registered version IDs
const enqueuedVersionIds = enqueue.mock.calls.slice(1).map((call) => call[1]);
expect(enqueuedVersionIds).toContain('/facebook/react/v18.3.0');
expect(enqueuedVersionIds).toContain('/facebook/react/v17.0.0');
});
// Explicitly targeting one version must NOT enumerate the other versions.
it('does NOT enqueue version jobs when an explicit versionId is provided', async () => {
const repoService = new RepositoryService(db);
const versionService = new VersionService(db);
repoService.add({ source: 'github', sourceUrl: 'https://github.com/facebook/react' });
versionService.add('/facebook/react', 'v18.3.0', 'React v18.3.0');
const enqueue = vi.fn().mockImplementation(
(repositoryId: string, versionId?: string) => makeEnqueueJob(repositoryId, versionId)
);
mockQueue = { enqueue };
const response = await postIndex({
params: { id: encodeURIComponent('/facebook/react') },
request: new Request('http://test', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ version: '/facebook/react/v18.3.0' })
})
} as never);
expect(response.status).toBe(202);
const body = await response.json();
// Only one call — the explicit version, no extra version enumeration
expect(enqueue).toHaveBeenCalledTimes(1);
expect(body.versionJobs).toEqual([]);
});
});

View File

@@ -10,6 +10,7 @@ import { RepositoryVersionMapper } from '$lib/server/mappers/repository-version.
import { IndexingJobMapper } from '$lib/server/mappers/indexing-job.mapper.js';
import { RepositoryService } from '$lib/server/services/repository.service';
import { VersionService } from '$lib/server/services/version.service';
import { getQueue } from '$lib/server/pipeline/startup';
import { handleServiceError, NotFoundError, InvalidInputError } from '$lib/server/utils/validation';
function getServices() {
@@ -78,7 +79,10 @@ export const POST: RequestHandler = async ({ params, request }) => {
let job: ReturnType<typeof IndexingJobMapper.toDto> | undefined;
if (autoIndex) {
const indexingJob = repoService.createIndexingJob(repositoryId, version.id);
const queue = getQueue();
const indexingJob = queue
? queue.enqueue(repositoryId, version.id)
: repoService.createIndexingJob(repositoryId, version.id);
job = IndexingJobMapper.toDto(indexingJob);
}

View File

@@ -0,0 +1,63 @@
/**
* POST /api/v1/libs/:id/versions/discover — discover git tags for a local repository.
*
* Returns { tags: Array<{ tag: string; commitHash: string }> }.
* For GitHub repositories or when tag discovery fails, returns { tags: [] } (not an error).
* Returns 404 if the repository does not exist.
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { getClient } from '$lib/server/db/client';
import { RepositoryService } from '$lib/server/services/repository.service';
import { VersionService } from '$lib/server/services/version.service';
import { handleServiceError, NotFoundError } from '$lib/server/utils/validation';
function getServices() {
const db = getClient();
return {
repoService: new RepositoryService(db),
versionService: new VersionService(db)
};
}
// ---------------------------------------------------------------------------
// POST /api/v1/libs/:id/versions/discover
// ---------------------------------------------------------------------------
/**
 * Discover git tags for the repository named by the percent-encoded :id param.
 * Unknown repositories surface as a 404 via NotFoundError; any tag-discovery
 * failure (e.g. GitHub-hosted repos) yields { tags: [] } rather than an error.
 */
export const POST: RequestHandler = ({ params }) => {
  try {
    const { repoService, versionService } = getServices();
    // Repository IDs contain slashes, so the route param arrives encoded.
    const repositoryId = decodeURIComponent(params.id);
    if (!repoService.get(repositoryId)) {
      throw new NotFoundError(`Repository ${repositoryId} not found`);
    }
    try {
      return json({ tags: versionService.discoverTags(repositoryId) });
    } catch {
      // GitHub repos or git errors — return empty tags gracefully
      return json({ tags: [] });
    }
  } catch (err) {
    return handleServiceError(err);
  }
};
// ---------------------------------------------------------------------------
// OPTIONS preflight
// ---------------------------------------------------------------------------
/** CORS preflight: advertise the allowed methods/headers with an empty 204. */
export const OPTIONS: RequestHandler = () => {
  const headers = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': 'POST, OPTIONS',
    'Access-Control-Allow-Headers': 'Content-Type, Authorization'
  };
  return new Response(null, { status: 204, headers });
};

View File

@@ -0,0 +1,160 @@
/**
* Unit tests for POST /api/v1/libs/:id/versions/discover
*
* Verifies:
* - Local repo returns discovered tags
* - GitHub repo returns empty tags gracefully (no error)
* - Non-existent repo returns 404
*/
import { beforeEach, describe, expect, it, vi } from 'vitest';
import Database from 'better-sqlite3';
import { readFileSync } from 'node:fs';
import { join } from 'node:path';
// Shared in-memory DB handle; reassigned in beforeEach so the mocked
// getClient() below always resolves to the current test database.
let db: Database.Database;
// Mock both the extensionless and '.js' specifiers so either import form
// used by the route resolves to the same stub.
vi.mock('$lib/server/db/client', () => ({
  getClient: () => db
}));
vi.mock('$lib/server/db/client.js', () => ({
  getClient: () => db
}));
// No background pipeline in unit tests: getQueue() returns null so handlers
// take their queue-less code path.
vi.mock('$lib/server/pipeline/startup', () => ({
  getQueue: () => null
}));
vi.mock('$lib/server/pipeline/startup.js', () => ({
  getQueue: () => null
}));
// Mock git utilities so tests don't require a real git repo
vi.mock('$lib/server/utils/git', () => ({
  discoverVersionTags: vi.fn(),
  resolveTagToCommit: vi.fn()
}));
vi.mock('$lib/server/utils/git.js', () => ({
  discoverVersionTags: vi.fn(),
  resolveTagToCommit: vi.fn()
}));
// Imported after the vi.mock calls (which Vitest hoists above imports) so the
// handler binds to the mocked modules.
import { POST as postDiscover } from './+server.js';
// Seed timestamp in epoch seconds, used for created_at/updated_at below.
const NOW_S = Math.floor(Date.now() / 1000);
/**
 * Build an in-memory SQLite database with every migration and the FTS schema
 * applied, so route handlers run against the real table layout.
 */
function createTestDb(): Database.Database {
  const client = new Database(':memory:');
  client.pragma('foreign_keys = ON');
  const migrationsFolder = join(import.meta.dirname, '../../../../../../../lib/server/db/migrations');
  const ftsFile = join(import.meta.dirname, '../../../../../../../lib/server/db/fts.sql');
  const migrationFiles = [
    '0000_large_master_chief.sql',
    '0001_quick_nighthawk.sql',
    '0002_silky_stellaris.sql'
  ];
  for (const file of migrationFiles) {
    const sql = readFileSync(join(migrationsFolder, file), 'utf-8');
    // Migration files separate statements with an explicit breakpoint marker.
    for (const raw of sql.split('--> statement-breakpoint')) {
      const statement = raw.trim();
      if (statement) {
        client.exec(statement);
      }
    }
  }
  client.exec(readFileSync(ftsFile, 'utf-8'));
  return client;
}
/**
 * Insert a repository row and return its id. Defaults model the GitHub-hosted
 * '/facebook/react' repo; pass overrides to seed a local repo instead.
 */
function seedRepo(
  client: Database.Database,
  overrides: { id?: string; source?: 'github' | 'local'; sourceUrl?: string } = {}
): string {
  const {
    id = '/facebook/react',
    source = 'github',
    sourceUrl = 'https://github.com/facebook/react'
  } = overrides;
  const insert = client.prepare(
    `INSERT INTO repositories
      (id, title, source, source_url, state, created_at, updated_at)
      VALUES (?, ?, ?, ?, 'indexed', ?, ?)`
  );
  insert.run(id, 'React', source, sourceUrl, NOW_S, NOW_S);
  return id;
}
// Route-level tests for the tag-discovery endpoint. `db`, `seedRepo`, and the
// git-utility mocks are defined at module scope above.
describe('POST /api/v1/libs/:id/versions/discover', () => {
  beforeEach(async () => {
    // Fresh schema per test; the mocked getClient() closure picks this up.
    db = createTestDb();
    // Reset the mocked git helpers so call expectations don't leak between tests.
    const git = await import('$lib/server/utils/git');
    vi.mocked(git.discoverVersionTags).mockReset();
    vi.mocked(git.resolveTagToCommit).mockReset();
  });
  it('returns 404 when repo does not exist', async () => {
    // Repository IDs contain slashes, so params.id is percent-encoded as
    // SvelteKit would deliver it.
    const response = await postDiscover({
      params: { id: encodeURIComponent('/nonexistent/repo') }
    } as never);
    expect(response.status).toBe(404);
    const body = await response.json();
    expect(body.error).toBeDefined();
  });
  it('returns discovered tags for a local repository', async () => {
    const { discoverVersionTags, resolveTagToCommit } = await import('$lib/server/utils/git');
    vi.mocked(discoverVersionTags).mockReturnValue(['v2.0.0', 'v1.0.0']);
    // Distinct commit hash per tag, to verify tag→commit pairing in the response.
    vi.mocked(resolveTagToCommit).mockImplementation(({ tag }) =>
      tag === 'v2.0.0' ? 'abc12345' : 'def67890'
    );
    seedRepo(db, { source: 'local', sourceUrl: '/home/user/myrepo' });
    const response = await postDiscover({
      params: { id: encodeURIComponent('/facebook/react') }
    } as never);
    expect(response.status).toBe(200);
    const body = await response.json();
    expect(body.tags).toHaveLength(2);
    // Response order follows what discoverVersionTags returned.
    expect(body.tags[0]).toEqual({ tag: 'v2.0.0', commitHash: 'abc12345' });
    expect(body.tags[1]).toEqual({ tag: 'v1.0.0', commitHash: 'def67890' });
  });
  it('returns empty tags for a GitHub repository (no error)', async () => {
    seedRepo(db, { source: 'github', sourceUrl: 'https://github.com/facebook/react' });
    const response = await postDiscover({
      params: { id: encodeURIComponent('/facebook/react') }
    } as never);
    expect(response.status).toBe(200);
    const body = await response.json();
    expect(body.tags).toEqual([]);
  });
  it('returns empty tags when git discovery throws', async () => {
    // The endpoint treats discovery failures as "no tags", not as an error.
    const { discoverVersionTags } = await import('$lib/server/utils/git');
    vi.mocked(discoverVersionTags).mockImplementation(() => {
      throw new Error('git command failed');
    });
    seedRepo(db, { source: 'local', sourceUrl: '/home/user/myrepo' });
    const response = await postDiscover({
      params: { id: encodeURIComponent('/facebook/react') }
    } as never);
    expect(response.status).toBe(200);
    const body = await response.json();
    expect(body.tags).toEqual([]);
  });
});

View File

@@ -1,30 +1,25 @@
/**
* GET /api/v1/settings/embedding — retrieve all embedding profiles
* POST /api/v1/settings/embedding — create or update an embedding profile
* PUT /api/v1/settings/embedding — alias for POST (backward compat)
* GET /api/v1/settings/embedding — retrieve embedding settings
* POST /api/v1/settings/embedding — update active embedding settings
* PUT /api/v1/settings/embedding — alias for POST
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import type { EmbeddingSettingsUpdateDto } from '$lib/dtos/embedding-settings.js';
import { getClient } from '$lib/server/db/client';
import { createProviderFromProfile } from '$lib/server/embeddings/registry';
import type { EmbeddingProfile, NewEmbeddingProfile } from '$lib/server/db/schema';
import { EmbeddingSettingsDtoMapper } from '$lib/server/mappers/embedding-settings.dto.mapper.js';
import { EmbeddingSettingsService } from '$lib/server/services/embedding-settings.service.js';
import { handleServiceError, InvalidInputError } from '$lib/server/utils/validation';
// ---------------------------------------------------------------------------
// GET — Return all profiles
// GET — Return embedding settings
// ---------------------------------------------------------------------------
export const GET: RequestHandler = () => {
try {
const db = getClient();
const profiles = db
.prepare('SELECT * FROM embedding_profiles ORDER BY is_default DESC, created_at ASC')
.all() as EmbeddingProfile[];
// Sanitize: remove sensitive config fields like apiKey
const safeProfiles = profiles.map(sanitizeProfile);
return json({ profiles: safeProfiles });
const service = new EmbeddingSettingsService(getClient());
return json(EmbeddingSettingsDtoMapper.toDto(service.getSettings()));
} catch (err) {
return handleServiceError(err);
}
@@ -34,116 +29,23 @@ export const GET: RequestHandler = () => {
// POST/PUT — Create or update a profile
// ---------------------------------------------------------------------------
async function upsertProfile(body: unknown) {
async function upsertSettings(body: unknown) {
if (typeof body !== 'object' || body === null) {
throw new InvalidInputError('Request body must be a JSON object');
}
const obj = body as Record<string, unknown>;
// Required fields
if (typeof obj.id !== 'string' || !obj.id) {
throw new InvalidInputError('id is required');
}
if (typeof obj.providerKind !== 'string' || !obj.providerKind) {
throw new InvalidInputError('providerKind is required');
}
if (typeof obj.title !== 'string' || !obj.title) {
throw new InvalidInputError('title is required');
}
if (typeof obj.model !== 'string' || !obj.model) {
throw new InvalidInputError('model is required');
}
if (typeof obj.dimensions !== 'number') {
throw new InvalidInputError('dimensions must be a number');
}
const profile: NewEmbeddingProfile = {
id: obj.id,
providerKind: obj.providerKind,
title: obj.title,
enabled: typeof obj.enabled === 'boolean' ? obj.enabled : true,
isDefault: typeof obj.isDefault === 'boolean' ? obj.isDefault : false,
model: obj.model,
dimensions: obj.dimensions,
config: (obj.config as Record<string, unknown>) ?? {},
createdAt: Date.now(),
updatedAt: Date.now()
};
// Validate provider availability before persisting
const provider = createProviderFromProfile(profile as EmbeddingProfile);
const available = await provider.isAvailable();
if (!available) {
throw new InvalidInputError(
`Could not connect to the "${profile.providerKind}" provider. Check your configuration.`
);
}
const db = getClient();
// If setting as default, clear other defaults first
if (profile.isDefault) {
db.prepare('UPDATE embedding_profiles SET is_default = 0').run();
}
// Upsert the profile
db.prepare(
`INSERT INTO embedding_profiles
(id, provider_kind, title, enabled, is_default, model, dimensions, config, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
provider_kind = excluded.provider_kind,
title = excluded.title,
enabled = excluded.enabled,
is_default = excluded.is_default,
model = excluded.model,
dimensions = excluded.dimensions,
config = excluded.config,
updated_at = excluded.updated_at`
).run(
profile.id,
profile.providerKind,
profile.title,
profile.enabled ? 1 : 0,
profile.isDefault ? 1 : 0,
profile.model,
profile.dimensions,
JSON.stringify(profile.config),
profile.createdAt,
profile.updatedAt
);
const inserted = db
.prepare('SELECT * FROM embedding_profiles WHERE id = ?')
.get(profile.id) as EmbeddingProfile;
return sanitizeProfile(inserted);
const service = new EmbeddingSettingsService(getClient());
const settings = await service.updateSettings(body as EmbeddingSettingsUpdateDto);
return EmbeddingSettingsDtoMapper.toDto(settings);
}
export const POST: RequestHandler = async ({ request }) => {
try {
const body = await request.json();
const profile = await upsertProfile(body);
return json(profile);
return json(await upsertSettings(body));
} catch (err) {
return handleServiceError(err);
}
};
// Backward compat alias
export const PUT: RequestHandler = POST;
// ---------------------------------------------------------------------------
// Sanitize — remove sensitive config fields before returning to clients
// ---------------------------------------------------------------------------
function sanitizeProfile(profile: EmbeddingProfile): EmbeddingProfile {
const config = profile.config as Record<string, unknown>;
if (config && config.apiKey) {
const rest = { ...config };
delete rest.apiKey;
return { ...profile, config: rest };
}
return profile;
}

View File

@@ -0,0 +1,183 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';
import Database from 'better-sqlite3';
import { readFileSync } from 'node:fs';
import { join } from 'node:path';
// Shared in-memory DB handle; reassigned in beforeEach so the mocked
// getClient() below always returns the current test database.
let db: Database.Database;
// Mock both the extensionless and '.js' specifiers so either import form
// resolves to the same stub.
vi.mock('$lib/server/db/client', () => ({
  getClient: () => db
}));
vi.mock('$lib/server/db/client.js', () => ({
  getClient: () => db
}));
// Provider-factory stub: every profile reports as available, so settings
// validation always succeeds in these tests.
vi.mock('$lib/server/embeddings/registry', () => ({
  createProviderFromProfile: () => ({
    isAvailable: async () => true
  })
}));
vi.mock('$lib/server/embeddings/registry.js', () => ({
  createProviderFromProfile: () => ({
    isAvailable: async () => true
  })
}));
// Stub the local provider with fixed model/dimensions so the /test route
// responds deterministically without loading a real model.
vi.mock('$lib/server/embeddings/local.provider', () => ({
  LocalEmbeddingProvider: class {
    readonly model = 'Xenova/all-MiniLM-L6-v2';
    readonly dimensions = 384;
    async isAvailable() {
      return true;
    }
  }
}));
vi.mock('$lib/server/embeddings/local.provider.js', () => ({
  LocalEmbeddingProvider: class {
    readonly model = 'Xenova/all-MiniLM-L6-v2';
    readonly dimensions = 384;
    async isAvailable() {
      return true;
    }
  }
}));
// Imported after the vi.mock calls (which Vitest hoists above imports) so the
// handlers bind to the mocked modules.
import { GET as getEmbeddingSettings, PUT as putEmbeddingSettings } from './+server.js';
import { GET as getEmbeddingTest } from './test/+server.js';
/**
 * Create an in-memory SQLite database and apply every migration plus the FTS
 * schema, mirroring the production layout for these route tests.
 */
function createTestDb(): Database.Database {
  const client = new Database(':memory:');
  client.pragma('foreign_keys = ON');
  const migrationsFolder = join(import.meta.dirname, '../../../../../lib/server/db/migrations');
  const ftsFile = join(import.meta.dirname, '../../../../../lib/server/db/fts.sql');
  const migrationSql = [
    readFileSync(join(migrationsFolder, '0000_large_master_chief.sql'), 'utf-8'),
    readFileSync(join(migrationsFolder, '0001_quick_nighthawk.sql'), 'utf-8'),
    readFileSync(join(migrationsFolder, '0002_silky_stellaris.sql'), 'utf-8')
  ];
  for (const sql of migrationSql) {
    // Migration files separate statements with an explicit breakpoint marker.
    sql
      .split('--> statement-breakpoint')
      .map((statement) => statement.trim())
      .filter(Boolean)
      .forEach((statement) => client.exec(statement));
  }
  client.exec(readFileSync(ftsFile, 'utf-8'));
  return client;
}
// Route-level tests for the embedding settings endpoints. The DB mocks and
// provider stubs are defined at module scope above.
describe('embedding settings routes', () => {
  beforeEach(() => {
    // Fresh in-memory schema per test; the GET test below relies on the
    // migrations having seeded the default local profile.
    db = createTestDb();
  });
  it('GET /api/v1/settings/embedding returns profile-based settings for the seeded default profile', async () => {
    const response = await getEmbeddingSettings({} as never);
    expect(response.status).toBe(200);
    const body = await response.json();
    expect(body.activeProfileId).toBe('local-default');
    expect(body.activeProfile).toMatchObject({
      id: 'local-default',
      providerKind: 'local-transformers',
      title: 'Local (Xenova/all-MiniLM-L6-v2)'
    });
    expect(body.profiles).toHaveLength(1);
    expect(body.profiles[0].providerKind).toBe('local-transformers');
    expect(body.profiles[0].isDefault).toBe(true);
  });
  it('PUT /api/v1/settings/embedding persists a clean profile-based OpenAI payload', async () => {
    const response = await putEmbeddingSettings({
      request: new Request('http://test/api/v1/settings/embedding', {
        method: 'PUT',
        headers: { 'content-type': 'application/json' },
        body: JSON.stringify({
          activeProfileId: 'openai-default',
          profile: {
            id: 'openai-default',
            providerKind: 'openai-compatible',
            title: 'OpenAI-compatible',
            model: 'text-embedding-3-small',
            dimensions: 1536,
            config: {
              baseUrl: 'https://api.openai.com/v1',
              apiKey: 'sk-test',
              model: 'text-embedding-3-small'
            }
          }
        })
      })
    } as never);
    expect(response.status).toBe(200);
    const body = await response.json();
    expect(body.activeProfileId).toBe('openai-default');
    expect(body.activeProfile).toMatchObject({
      id: 'openai-default',
      providerKind: 'openai-compatible'
    });
    // The raw apiKey must not be echoed back in config…
    expect(body.activeProfile.config).toEqual({
      baseUrl: 'https://api.openai.com/v1',
      model: 'text-embedding-3-small'
    });
    // …it is surfaced only as a redacted config entry.
    expect(body.activeProfile.configEntries).toEqual(
      expect.arrayContaining([
        expect.objectContaining({ key: 'apiKey', value: '[redacted]', redacted: true })
      ])
    );
    expect(body.profiles).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          id: 'openai-default',
          providerKind: 'openai-compatible',
          model: 'text-embedding-3-small',
          dimensions: 1536,
          isDefault: true
        })
      ])
    );
    // Verify persistence at the SQL level, not just the API response shape.
    const activeProfile = db
      .prepare(
        'SELECT id, provider_kind, is_default, enabled, model, dimensions FROM embedding_profiles WHERE is_default = 1 LIMIT 1'
      )
      .get() as Record<string, unknown>;
    expect(activeProfile).toMatchObject({
      id: 'openai-default',
      provider_kind: 'openai-compatible',
      is_default: 1,
      enabled: 1,
      model: 'text-embedding-3-small',
      dimensions: 1536
    });
  });
  it('GET /api/v1/settings/embedding/test checks local-provider availability directly', async () => {
    // The local provider class is mocked above, so this exercises only the
    // route's response shape.
    const response = await getEmbeddingTest({} as never);
    expect(response.status).toBe(200);
    const body = await response.json();
    expect(body).toEqual({
      available: true,
      profile: {
        id: 'local-default',
        providerKind: 'local-transformers',
        model: 'Xenova/all-MiniLM-L6-v2',
        dimensions: 384
      }
    });
  });
});

View File

@@ -7,35 +7,24 @@
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { getClient } from '$lib/server/db/client';
import { LocalEmbeddingProvider } from '$lib/server/embeddings/local.provider';
import { createProviderFromProfile } from '$lib/server/embeddings/registry';
import type { EmbeddingProfile } from '$lib/server/db/schema';
import { EmbeddingProfileEntity } from '$lib/server/models/embedding-profile';
import { EmbeddingProfileMapper } from '$lib/server/mappers/embedding-profile.mapper';
import { handleServiceError } from '$lib/server/utils/validation';
export const GET: RequestHandler = async () => {
try {
const db = getClient();
const profile = db
.prepare<
[],
EmbeddingProfile
>('SELECT * FROM embedding_profiles WHERE is_default = 1 AND enabled = 1 LIMIT 1')
.get();
if (!profile) {
return json({ available: false, error: 'No active embedding profile configured' });
}
const provider = createProviderFromProfile(profile);
const provider = new LocalEmbeddingProvider();
const available = await provider.isAvailable();
return json({
available,
profile: {
id: profile.id,
providerKind: profile.providerKind,
model: profile.model,
dimensions: profile.dimensions
id: 'local-default',
providerKind: 'local-transformers',
model: provider.model,
dimensions: provider.dimensions
}
});
} catch (err) {
@@ -46,19 +35,43 @@ export const GET: RequestHandler = async () => {
export const POST: RequestHandler = async ({ request }) => {
try {
const body = await request.json();
const config = validateConfig(body);
if (config.provider === 'none') {
throw new InvalidInputError('Cannot test the "none" provider — no backend is configured.');
if (typeof body !== 'object' || body === null) {
throw new Error('Request body must be a JSON object');
}
const provider = createProviderFromConfig(config);
const candidate = body as Record<string, unknown>;
if (candidate.providerKind !== 'openai-compatible') {
throw new Error('Only openai-compatible providers can be tested via this endpoint');
}
if (typeof candidate.model !== 'string' || typeof candidate.dimensions !== 'number') {
throw new Error('model and dimensions are required');
}
const provider = createProviderFromProfile(
EmbeddingProfileMapper.fromEntity(
new EmbeddingProfileEntity({
id: typeof candidate.id === 'string' ? candidate.id : 'test-openai-profile',
provider_kind: 'openai-compatible',
title: typeof candidate.title === 'string' ? candidate.title : 'Test Provider',
enabled: true,
is_default: false,
model: candidate.model,
dimensions: candidate.dimensions,
config:
typeof candidate.config === 'object' && candidate.config !== null
? (candidate.config as Record<string, unknown>)
: {},
created_at: Date.now(),
updated_at: Date.now()
})
)
);
const available = await provider.isAvailable();
if (!available) {
return new Response(
JSON.stringify({
error: `Provider "${config.provider}" is not available. Check your configuration.`
error: 'Provider is not available. Check your configuration.'
}),
{ status: 400, headers: { 'Content-Type': 'application/json' } }
);

View File

@@ -2,8 +2,8 @@ import type { PageServerLoad } from './$types';
import { error } from '@sveltejs/kit';
export const load: PageServerLoad = async ({ fetch, params }) => {
const id = params.id;
const res = await fetch(`/api/v1/libs/${encodeURIComponent(id)}`);
const repositoryId = decodeURIComponent(params.id);
const res = await fetch(`/api/v1/libs/${encodeURIComponent(repositoryId)}`);
if (res.status === 404) {
error(404, 'Repository not found');
@@ -16,7 +16,9 @@ export const load: PageServerLoad = async ({ fetch, params }) => {
const repo = await res.json();
// Fetch recent jobs
const jobsRes = await fetch(`/api/v1/jobs?repositoryId=${encodeURIComponent(id)}&limit=5`);
const jobsRes = await fetch(
`/api/v1/jobs?repositoryId=${encodeURIComponent(repositoryId)}&limit=5`
);
const jobsData = jobsRes.ok ? await jobsRes.json() : { jobs: [] };
return {

View File

@@ -1,28 +1,63 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve as resolveRoute } from '$app/paths';
import { onMount } from 'svelte';
import type { PageData } from './$types';
import type { Repository, RepositoryVersion, IndexingJob } from '$lib/types';
import type { Repository, IndexingJob } from '$lib/types';
import ConfirmDialog from '$lib/components/ConfirmDialog.svelte';
import IndexingProgress from '$lib/components/IndexingProgress.svelte';
import StatBadge from '$lib/components/StatBadge.svelte';
let { data }: { data: PageData } = $props();
// Initialized empty; $effect syncs from data prop on every navigation/reload.
let repo = $state<Repository & { versions?: RepositoryVersion[] }>(
{} as Repository & { versions?: RepositoryVersion[] }
let repoOverride = $state<
(Repository & { indexedVersions?: string[]; embeddingCount?: number }) | null
>(null);
const repo = $derived(
repoOverride ??
((data.repo ?? {}) as Repository & {
indexedVersions?: string[];
embeddingCount?: number;
})
);
let recentJobs = $state<IndexingJob[]>([]);
$effect(() => {
if (data.repo) repo = data.repo;
recentJobs = data.recentJobs ?? [];
});
const recentJobs = $derived((data.recentJobs ?? []) as IndexingJob[]);
let showDeleteConfirm = $state(false);
let activeJobId = $state<string | null>(null);
let errorMessage = $state<string | null>(null);
let successMessage = $state<string | null>(null);
// Version management state
interface VersionDto {
id: string;
repositoryId: string;
tag: string;
title: string | null;
commitHash: string | null;
state: 'pending' | 'indexing' | 'indexed' | 'error';
totalSnippets: number;
indexedAt: string | null;
createdAt: string;
}
let versions = $state<VersionDto[]>([]);
let versionsLoading = $state(false);
// Add version form
let addVersionTag = $state('');
let addVersionBusy = $state(false);
// Discover tags state
let discoverBusy = $state(false);
let discoveredTags = $state<Array<{ tag: string; commitHash: string }>>([]);
let selectedDiscoveredTags = $state<Set<string>>(new Set());
let showDiscoverPanel = $state(false);
let registerBusy = $state(false);
// Active version indexing jobs: tag -> jobId
let activeVersionJobs = $state<Record<string, string | undefined>>({});
// Remove confirm
let removeTag = $state<string | null>(null);
const stateColors: Record<string, string> = {
pending: 'bg-gray-100 text-gray-600',
indexing: 'bg-blue-100 text-blue-700',
@@ -41,13 +76,32 @@
try {
const res = await fetch(`/api/v1/libs/${encodeURIComponent(repo.id)}`);
if (res.ok) {
repo = await res.json();
repoOverride = await res.json();
}
} catch {
// ignore
}
}
/**
 * Fetch the registered versions for this repository into component state.
 * Network errors are ignored; the loading flag is always cleared.
 */
async function loadVersions() {
  versionsLoading = true;
  try {
    const res = await fetch(`/api/v1/libs/${encodeURIComponent(repo.id)}/versions`);
    if (res.ok) {
      const payload = await res.json();
      versions = payload.versions ?? [];
    }
  } catch {
    // Best-effort: leave the current list untouched on failure.
  } finally {
    versionsLoading = false;
  }
}

// Initial client-side load of the version list (onMount never runs on the server).
onMount(() => {
  loadVersions();
});
async function handleReindex() {
errorMessage = null;
successMessage = null;
@@ -63,8 +117,22 @@
if (d.job?.id) {
activeJobId = d.job.id;
}
successMessage = 'Re-indexing started.';
await refreshRepo();
const versionCount = d.versionJobs?.length ?? 0;
if (versionCount > 0) {
let next = { ...activeVersionJobs };
for (const vj of d.versionJobs) {
const matched = versions.find((v) => v.id === vj.versionId);
if (matched) {
next = { ...next, [matched.tag]: vj.id };
}
}
activeVersionJobs = next;
}
successMessage =
versionCount > 0
? `Re-indexing started. Also queued ${versionCount} version job${versionCount === 1 ? '' : 's'}.`
: 'Re-indexing started.';
await Promise.all([refreshRepo(), loadVersions()]);
} catch (e) {
errorMessage = (e as Error).message;
}
@@ -87,12 +155,151 @@
}
}
/**
 * Register a new version from the inline form. On success, clears the form,
 * records any auto-started indexing job for the tag, and reloads the list.
 * Failures surface via errorMessage.
 */
async function handleAddVersion() {
  const tag = addVersionTag.trim();
  if (!tag) return;
  addVersionBusy = true;
  errorMessage = null;
  try {
    const res = await fetch(`/api/v1/libs/${encodeURIComponent(repo.id)}/versions`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ tag, autoIndex: true })
    });
    const payload = await res.json();
    if (!res.ok) {
      throw new Error(payload.error ?? 'Failed to add version');
    }
    const jobId = payload.job?.id;
    if (jobId) {
      activeVersionJobs = { ...activeVersionJobs, [tag]: jobId };
    }
    addVersionTag = '';
    await loadVersions();
  } catch (e) {
    errorMessage = (e as Error).message;
  } finally {
    addVersionBusy = false;
  }
}
/**
 * Queue an indexing job for a single registered version tag and remember the
 * returned job id so its progress indicator renders.
 */
async function handleIndexVersion(tag: string) {
  errorMessage = null;
  try {
    const res = await fetch(
      `/api/v1/libs/${encodeURIComponent(repo.id)}/versions/${encodeURIComponent(tag)}/index`,
      { method: 'POST' }
    );
    const payload = await res.json();
    if (!res.ok) {
      throw new Error(payload.error ?? 'Failed to queue version indexing');
    }
    const jobId = payload.job?.id;
    if (jobId) {
      activeVersionJobs = { ...activeVersionJobs, [tag]: jobId };
    }
  } catch (e) {
    errorMessage = (e as Error).message;
  }
}
/**
 * Delete the version currently pending confirmation (removeTag), then refresh
 * the list. The confirm dialog is dismissed immediately.
 */
async function handleRemoveVersion() {
  const tag = removeTag;
  if (!tag) return;
  removeTag = null;
  errorMessage = null;
  try {
    const res = await fetch(
      `/api/v1/libs/${encodeURIComponent(repo.id)}/versions/${encodeURIComponent(tag)}`,
      { method: 'DELETE' }
    );
    // A 204 response has no body, so only parse JSON on failure.
    if (!res.ok && res.status !== 204) {
      const payload = await res.json();
      throw new Error(payload.error ?? 'Failed to remove version');
    }
    await loadVersions();
  } catch (e) {
    errorMessage = (e as Error).message;
  }
}
/**
 * Ask the server to enumerate git tags for this repository, then open the
 * discover panel listing only tags not yet registered (all pre-selected).
 */
async function handleDiscoverTags() {
  discoverBusy = true;
  errorMessage = null;
  try {
    const res = await fetch(
      `/api/v1/libs/${encodeURIComponent(repo.id)}/versions/discover`,
      { method: 'POST' }
    );
    const payload = await res.json();
    if (!res.ok) {
      throw new Error(payload.error ?? 'Failed to discover tags');
    }
    const registered = new Set(versions.map((v) => v.tag));
    discoveredTags = (payload.tags ?? []).filter(
      (t: { tag: string; commitHash: string }) => !registered.has(t.tag)
    );
    selectedDiscoveredTags = new Set(discoveredTags.map((t) => t.tag));
    showDiscoverPanel = true;
  } catch (e) {
    errorMessage = (e as Error).message;
  } finally {
    discoverBusy = false;
  }
}
/**
 * Toggle a tag in the discovered-tag selection. A new Set is assigned so the
 * reassignment is visible to Svelte's state tracking.
 */
function toggleDiscoveredTag(tag: string) {
  const updated = new Set(selectedDiscoveredTags);
  // Set.delete returns false when the tag wasn't present — add it in that case.
  if (!updated.delete(tag)) {
    updated.add(tag);
  }
  selectedDiscoveredTags = updated;
}
/**
 * Register every selected discovered tag (auto-indexing each), remember the
 * job ids of any indexing jobs that started, close the panel, and refresh the
 * version list. Individual non-ok registrations are skipped silently.
 */
async function handleRegisterSelected() {
  if (selectedDiscoveredTags.size === 0) return;
  registerBusy = true;
  errorMessage = null;
  try {
    const tags = [...selectedDiscoveredTags];
    const register = (tag: string) =>
      fetch(`/api/v1/libs/${encodeURIComponent(repo.id)}/versions`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ tag, autoIndex: true })
      });
    const responses = await Promise.all(tags.map(register));
    const results = await Promise.all(responses.map((r) => (r.ok ? r.json() : null)));
    const queued: Record<string, string | undefined> = { ...activeVersionJobs };
    tags.forEach((tag, i) => {
      const jobId = results[i]?.job?.id;
      if (jobId) {
        queued[tag] = jobId;
      }
    });
    activeVersionJobs = queued;
    showDiscoverPanel = false;
    discoveredTags = [];
    selectedDiscoveredTags = new Set();
    await loadVersions();
  } catch (e) {
    errorMessage = (e as Error).message;
  } finally {
    registerBusy = false;
  }
}
/**
 * Render a timestamp for display.
 *
 * @param ts - Date object, epoch value, or date string. `null`/`undefined`
 *   (and other falsy values such as `''` or `0`) yield the literal 'Never'.
 * @returns Locale-formatted date-time string, or 'Never' when unset.
 */
function formatDate(ts: Date | number | string | null | undefined): string {
  // Falsy check is deliberate: '' and 0 are treated as "unset" here.
  if (!ts) return 'Never';
  // No cast needed — the Date constructor accepts Date, number, and string.
  return new Date(ts).toLocaleString();
}
const versions = $derived(repo.versions ?? []);
const embeddingCount = $derived(repo.embeddingCount ?? 0);
const totalSnippets = $derived(repo.totalSnippets ?? 0);
const totalTokens = $derived(repo.totalTokens ?? 0);
const trustScore = $derived(repo.trustScore ?? 0);
@@ -169,7 +376,13 @@
{#if activeJobId}
<div class="mt-4 rounded-xl border border-blue-100 bg-blue-50 p-4">
<p class="mb-2 text-sm font-medium text-blue-700">Indexing in progress</p>
<IndexingProgress jobId={activeJobId} />
<IndexingProgress
jobId={activeJobId}
oncomplete={() => {
activeJobId = null;
refreshRepo();
}}
/>
</div>
{:else if repo.state === 'error'}
<div class="mt-4 rounded-xl border border-red-100 bg-red-50 p-4">
@@ -180,6 +393,7 @@
<!-- Stats -->
<div class="mt-6 grid grid-cols-2 gap-3 sm:grid-cols-4">
<StatBadge label="Snippets" value={totalSnippets.toLocaleString()} />
<StatBadge label="Embeddings" value={embeddingCount.toLocaleString()} />
<StatBadge label="Tokens" value={totalTokens.toLocaleString()} />
<StatBadge label="Trust Score" value="{trustScore.toFixed(1)}/10" />
{#if repo.stars != null}
@@ -187,6 +401,146 @@
{/if}
</div>
<!-- Versions -->
<div class="mt-6 rounded-xl border border-gray-200 bg-white p-5">
<div class="mb-4 flex flex-wrap items-center justify-between gap-3">
<h2 class="text-sm font-semibold text-gray-700">Versions</h2>
<div class="flex flex-wrap items-center gap-2">
<!-- Add version inline form -->
<form
onsubmit={(e) => {
e.preventDefault();
handleAddVersion();
}}
class="flex items-center gap-1.5"
>
<input
type="text"
bind:value={addVersionTag}
placeholder="e.g. v2.0.0"
class="rounded-lg border border-gray-200 px-3 py-1.5 text-sm text-gray-900 placeholder-gray-400 focus:border-blue-400 focus:outline-none"
/>
<button
type="submit"
disabled={addVersionBusy || !addVersionTag.trim()}
class="rounded-lg bg-blue-600 px-3 py-1.5 text-sm font-medium text-white hover:bg-blue-700 disabled:cursor-not-allowed disabled:opacity-50"
>
Add
</button>
</form>
<!-- Discover tags button — local repos only -->
{#if repo.source === 'local'}
<button
onclick={handleDiscoverTags}
disabled={discoverBusy}
class="rounded-lg border border-gray-200 px-3 py-1.5 text-sm font-medium text-gray-700 hover:bg-gray-50 disabled:cursor-not-allowed disabled:opacity-50"
>
{discoverBusy ? 'Discovering...' : 'Discover tags'}
</button>
{/if}
</div>
</div>
<!-- Discover panel -->
{#if showDiscoverPanel}
<div class="mb-4 rounded-lg border border-blue-100 bg-blue-50 p-4">
<div class="mb-2 flex items-center justify-between">
<p class="text-sm font-medium text-blue-700">
{discoveredTags.length === 0
? 'No new tags found'
: `${discoveredTags.length} new tag${discoveredTags.length === 1 ? '' : 's'} available`}
</p>
<button
onclick={() => {
showDiscoverPanel = false;
discoveredTags = [];
selectedDiscoveredTags = new Set();
}}
class="text-xs text-blue-600 hover:underline"
>
Close
</button>
</div>
{#if discoveredTags.length > 0}
<div class="mb-3 flex flex-col gap-1.5">
{#each discoveredTags as discovered (discovered.tag)}
<label class="flex cursor-pointer items-center gap-2 text-sm">
<input
type="checkbox"
checked={selectedDiscoveredTags.has(discovered.tag)}
onchange={() => toggleDiscoveredTag(discovered.tag)}
class="rounded border-gray-300"
/>
<span class="font-mono text-gray-800">{discovered.tag}</span>
<span class="font-mono text-xs text-gray-400">{discovered.commitHash.slice(0, 8)}</span>
</label>
{/each}
</div>
<button
onclick={handleRegisterSelected}
disabled={registerBusy || selectedDiscoveredTags.size === 0}
class="rounded-lg bg-blue-600 px-3 py-1.5 text-sm font-medium text-white hover:bg-blue-700 disabled:cursor-not-allowed disabled:opacity-50"
>
{registerBusy
? 'Registering...'
: `Register ${selectedDiscoveredTags.size} selected`}
</button>
{/if}
</div>
{/if}
<!-- Versions list -->
{#if versionsLoading}
<p class="text-sm text-gray-400">Loading versions...</p>
{:else if versions.length === 0}
<p class="text-sm text-gray-400">No versions registered. Add a tag above to get started.</p>
{:else}
<div class="divide-y divide-gray-100">
{#each versions as version (version.id)}
<div class="py-2.5">
<div class="flex items-center justify-between">
<div class="flex items-center gap-3">
<span class="font-mono text-sm font-medium text-gray-900">{version.tag}</span>
<span
class="rounded-full px-2 py-0.5 text-xs font-medium {stateColors[version.state] ??
'bg-gray-100 text-gray-600'}"
>
{stateLabels[version.state] ?? version.state}
</span>
</div>
<div class="flex items-center gap-2">
<button
onclick={() => handleIndexVersion(version.tag)}
disabled={version.state === 'indexing' || !!activeVersionJobs[version.tag]}
class="rounded-lg border border-blue-200 px-3 py-1 text-xs font-medium text-blue-600 hover:bg-blue-50 disabled:cursor-not-allowed disabled:opacity-50"
>
{version.state === 'indexing' || !!activeVersionJobs[version.tag] ? 'Indexing...' : 'Index'}
</button>
<button
onclick={() => (removeTag = version.tag)}
class="rounded-lg border border-red-100 px-3 py-1 text-xs font-medium text-red-500 hover:bg-red-50"
>
Remove
</button>
</div>
</div>
{#if !!activeVersionJobs[version.tag]}
<IndexingProgress
jobId={activeVersionJobs[version.tag]!}
oncomplete={() => {
const { [version.tag]: _, ...rest } = activeVersionJobs;
activeVersionJobs = rest;
loadVersions();
refreshRepo();
}}
/>
{/if}
</div>
{/each}
</div>
{/if}
</div>
<!-- Metadata -->
<div class="mt-6 rounded-xl border border-gray-200 bg-white p-5">
<h2 class="mb-3 text-sm font-semibold text-gray-700">Repository Info</h2>
@@ -210,36 +564,6 @@
</dl>
</div>
<!-- Versions -->
{#if versions.length > 0}
<div class="mt-6 rounded-xl border border-gray-200 bg-white p-5">
<h2 class="mb-3 text-sm font-semibold text-gray-700">Indexed Versions</h2>
<div class="divide-y divide-gray-100">
{#each versions as version (version.id)}
<div class="flex items-center justify-between py-2.5">
<div>
<span class="font-mono text-sm font-medium text-gray-900">{version.tag}</span>
{#if version.title}
<span class="ml-2 text-sm text-gray-500">{version.title}</span>
{/if}
</div>
<div class="flex items-center gap-3">
<span
class="rounded-full px-2 py-0.5 text-xs {stateColors[version.state] ??
'bg-gray-100 text-gray-600'}"
>
{stateLabels[version.state] ?? version.state}
</span>
{#if version.indexedAt}
<span class="text-xs text-gray-400">{formatDate(version.indexedAt)}</span>
{/if}
</div>
</div>
{/each}
</div>
</div>
{/if}
<!-- Recent Jobs -->
{#if recentJobs.length > 0}
<div class="mt-6 rounded-xl border border-gray-200 bg-white p-5">
@@ -287,3 +611,14 @@
onCancel={() => (showDeleteConfirm = false)}
/>
{/if}
{#if removeTag}
<ConfirmDialog
title="Remove Version"
message="Remove version '{removeTag}'? This will delete all indexed snippets for this version."
confirmLabel="Remove"
danger={true}
onConfirm={handleRemoveVersion}
onCancel={() => (removeTag = null)}
/>
{/if}

View File

@@ -0,0 +1,42 @@
import { describe, expect, it, vi } from 'vitest';
import { load } from './+page.server';
describe('/repos/[id] page server load', () => {
	it('decodes the route param once before calling downstream APIs', async () => {
		// Canned payloads for the two downstream calls the loader makes.
		const repoPayload = {
			id: '/facebook/react',
			title: 'React',
			indexedVersions: ['main', 'v18.3.0']
		};
		const jobsPayload = { jobs: [{ id: 'job-1', repositoryId: '/facebook/react' }] };

		// First fetch resolves the repository, second the recent jobs.
		const fetch = vi.fn();
		fetch.mockResolvedValueOnce({ ok: true, status: 200, json: async () => repoPayload });
		fetch.mockResolvedValueOnce({ ok: true, status: 200, json: async () => jobsPayload });

		const result = await load({
			fetch,
			params: { id: encodeURIComponent('/facebook/react') }
		} as never);

		// The slash-bearing id must be forwarded percent-encoded, not re-encoded.
		expect(fetch).toHaveBeenNthCalledWith(1, '/api/v1/libs/%2Ffacebook%2Freact');
		expect(fetch).toHaveBeenNthCalledWith(
			2,
			'/api/v1/jobs?repositoryId=%2Ffacebook%2Freact&limit=5'
		);
		expect(result).toEqual({
			repo: repoPayload,
			recentJobs: jobsPayload.jobs
		});
	});
});

View File

@@ -0,0 +1,33 @@
import { readdirSync } from 'node:fs';
import { join } from 'node:path';
import { describe, expect, it } from 'vitest';
/**
 * Recursively collect files under `directory` whose names are both
 * `+`-prefixed (reserved by SvelteKit routing) and named like test files
 * (`.test.` / `.spec.`). Returns full paths of every offender found.
 */
function collectReservedRouteTestFiles(directory: string): string[] {
	const matches: string[] = [];
	for (const entry of readdirSync(directory, { withFileTypes: true })) {
		const entryPath = join(directory, entry.name);
		if (entry.isDirectory()) {
			matches.push(...collectReservedRouteTestFiles(entryPath));
		} else if (
			entry.name.startsWith('+') &&
			(entry.name.includes('.test.') || entry.name.includes('.spec.'))
		) {
			matches.push(entryPath);
		}
	}
	return matches;
}
describe('SvelteKit route file conventions', () => {
	it('does not place test files in reserved +prefixed route filenames', () => {
		// Scan the routes tree rooted at this file's own directory; any hit is
		// a test file that SvelteKit would pick up as a route module.
		const offenders = collectReservedRouteTestFiles(import.meta.dirname);
		expect(offenders).toEqual([]);
	});
});

View File

@@ -0,0 +1,22 @@
import type { PageServerLoad } from './$types';
import { getClient } from '$lib/server/db/client.js';
import { LocalEmbeddingProvider } from '$lib/server/embeddings/local.provider.js';
import { EmbeddingSettingsDtoMapper } from '$lib/server/mappers/embedding-settings.dto.mapper.js';
import { EmbeddingSettingsService } from '$lib/server/services/embedding-settings.service.js';
/**
 * Server load for the settings page: maps the persisted embedding settings to
 * a DTO and probes whether the local embedding provider can be used, so the
 * page renders without a client-side fetch.
 */
export const load: PageServerLoad = async () => {
	const service = new EmbeddingSettingsService(getClient());
	const settings = EmbeddingSettingsDtoMapper.toDto(service.getSettings());

	// Probe the local provider; any failure (construction or probe) simply
	// reports it as unavailable rather than failing the page load.
	const localProviderAvailable = await (async () => {
		try {
			return await new LocalEmbeddingProvider().isAvailable();
		} catch {
			return false;
		}
	})();

	return { settings, localProviderAvailable };
};

View File

@@ -1,5 +1,12 @@
<script lang="ts">
import { onDestroy } from 'svelte';
import StatBadge from '$lib/components/StatBadge.svelte';
import type {
EmbeddingProfileDto,
EmbeddingSettingsDto,
EmbeddingSettingsUpdateDto
} from '$lib/dtos/embedding-settings';
import type { PageProps } from './$types';
// ---------------------------------------------------------------------------
// Provider presets
@@ -30,11 +37,25 @@
// State
// ---------------------------------------------------------------------------
let provider = $state<'none' | 'openai' | 'local'>('none');
let baseUrl = $state('https://api.openai.com/v1');
let { data }: PageProps = $props();
function getInitialSettings(): EmbeddingSettingsDto {
return data.settings;
}
function getInitialLocalProviderAvailability(): boolean {
return data.localProviderAvailable;
}
let settingsOverride = $state<EmbeddingSettingsDto | null>(null);
let provider = $state<'none' | 'openai' | 'local'>(
resolveProvider(getInitialSettings().activeProfile)
);
let baseUrl = $state(resolveBaseUrl(getInitialSettings()));
let apiKey = $state('');
let model = $state('text-embedding-3-small');
let dimensions = $state<number | undefined>(1536);
let model = $state(resolveModel(getInitialSettings()));
let dimensions = $state<number | undefined>(resolveDimensions(getInitialSettings()));
let openaiProfileId = $state(resolveOpenAiProfileId(getInitialSettings()));
let testStatus = $state<'idle' | 'testing' | 'ok' | 'error'>('idle');
let testError = $state<string | null>(null);
@@ -45,51 +66,12 @@
let saveError = $state<string | null>(null);
let saveStatusTimer: ReturnType<typeof setTimeout> | null = null;
let localAvailable = $state<boolean | null>(null);
let loading = $state(true);
const currentSettings = $derived(settingsOverride ?? data.settings);
const activeProfile = $derived(currentSettings.activeProfile);
const activeConfigEntries = $derived(activeProfile?.configEntries ?? []);
// ---------------------------------------------------------------------------
// Load current config + probe local provider on mount
// ---------------------------------------------------------------------------
$effect(() => {
let cancelled = false;
(async () => {
try {
const res = await fetch('/api/v1/settings/embedding');
if (!cancelled && res.ok) {
const data = await res.json();
provider = data.provider ?? 'none';
if (data.openai) {
baseUrl = data.openai.baseUrl ?? baseUrl;
model = data.openai.model ?? model;
dimensions = data.openai.dimensions ?? dimensions;
// apiKey is intentionally not returned by the server; leave blank
}
}
} catch {
// Non-fatal — fall back to defaults
} finally {
if (!cancelled) loading = false;
}
// Probe whether the local provider is available
try {
const res = await fetch('/api/v1/settings/embedding/test');
if (!cancelled && res.ok) {
const data = await res.json();
localAvailable = data.available ?? false;
}
} catch {
if (!cancelled) localAvailable = false;
}
})();
return () => {
cancelled = true;
onDestroy(() => {
if (saveStatusTimer) clearTimeout(saveStatusTimer);
};
});
// ---------------------------------------------------------------------------
@@ -106,6 +88,10 @@
}
async function testConnection() {
if (provider !== 'openai') {
return;
}
testStatus = 'testing';
testError = null;
testDimensions = null;
@@ -113,7 +99,14 @@
const res = await fetch('/api/v1/settings/embedding/test', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ provider, openai: { baseUrl, apiKey, model, dimensions } })
body: JSON.stringify({
id: openaiProfileId,
title: 'OpenAI-compatible',
providerKind: 'openai-compatible',
model,
dimensions: dimensions ?? 1536,
config: { baseUrl, apiKey, model, ...(dimensions ? { dimensions } : {}) }
})
});
if (res.ok) {
const data = await res.json();
@@ -138,9 +131,10 @@
const res = await fetch('/api/v1/settings/embedding', {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ provider, openai: { baseUrl, apiKey, model, dimensions } })
body: JSON.stringify(buildSaveRequest())
});
if (res.ok) {
settingsOverride = (await res.json()) as EmbeddingSettingsDto;
saveStatus = 'ok';
if (saveStatusTimer) clearTimeout(saveStatusTimer);
saveStatusTimer = setTimeout(() => {
@@ -164,6 +158,74 @@
event.preventDefault();
void save();
}
/** First stored OpenAI-compatible profile, or null when none exists. */
function getOpenAiProfile(settings: EmbeddingSettingsDto): EmbeddingProfileDto | null {
	for (const profile of settings.profiles) {
		if (profile.providerKind === 'openai-compatible') return profile;
	}
	return null;
}
/** Map a stored profile's providerKind onto the UI's provider selector value. */
function resolveProvider(profile: EmbeddingProfileDto | null): 'none' | 'openai' | 'local' {
	switch (profile?.providerKind) {
		case 'local-transformers':
			return 'local';
		case 'openai-compatible':
			return 'openai';
		default:
			// No profile, or an unrecognized kind, means embeddings are off.
			return 'none';
	}
}
/**
 * Initial base URL for the form: taken from the active profile when it is
 * OpenAI-compatible, else from any stored OpenAI-compatible profile, else the
 * public OpenAI endpoint.
 */
function resolveBaseUrl(settings: EmbeddingSettingsDto): string {
	const active = settings.activeProfile;
	const profile =
		active?.providerKind === 'openai-compatible' ? active : getOpenAiProfile(settings);
	const baseUrl = profile?.config.baseUrl;
	return typeof baseUrl === 'string' ? baseUrl : 'https://api.openai.com/v1';
}
/**
 * Initial model name for the form: the profile's config.model when it is a
 * string, else the profile-level model, else the OpenAI small default.
 */
function resolveModel(settings: EmbeddingSettingsDto): string {
	const active = settings.activeProfile;
	const profile =
		active?.providerKind === 'openai-compatible' ? active : getOpenAiProfile(settings);
	const configModel = profile?.config.model;
	if (typeof configModel === 'string') return configModel;
	return profile?.model ?? 'text-embedding-3-small';
}
/**
 * Initial embedding dimensions for the form; falls back to 1536 (OpenAI
 * text-embedding-3-small) when no OpenAI-compatible profile declares one.
 */
function resolveDimensions(settings: EmbeddingSettingsDto): number | undefined {
	const active = settings.activeProfile;
	const profile =
		active?.providerKind === 'openai-compatible' ? active : getOpenAiProfile(settings);
	if (profile?.dimensions != null) return profile.dimensions;
	return 1536;
}
/** Id of the stored OpenAI-compatible profile, or the well-known default id. */
function resolveOpenAiProfileId(settings: EmbeddingSettingsDto): string {
	return getOpenAiProfile(settings)?.id ?? 'openai-default';
}
/**
 * Build the PUT payload for the embedding-settings endpoint from the current
 * form state: 'none' clears the active profile, 'local' activates the built-in
 * local profile, and 'openai' upserts an OpenAI-compatible profile.
 */
function buildSaveRequest(): EmbeddingSettingsUpdateDto {
	switch (provider) {
		case 'none':
			return { activeProfileId: null };
		case 'local':
			return { activeProfileId: 'local-default' };
		default: {
			// `dimensions` is only written into the config when truthy, so a
			// cleared field does not persist a stale value.
			const config: Record<string, unknown> = { baseUrl, apiKey, model };
			if (dimensions) config.dimensions = dimensions;
			return {
				activeProfileId: openaiProfileId,
				profile: {
					id: openaiProfileId,
					providerKind: 'openai-compatible',
					title: 'OpenAI-compatible',
					model,
					dimensions: dimensions ?? 1536,
					config
				}
			};
		}
	}
}
/**
 * Format a stored timestamp for display. Values at or below 1e12 are treated
 * as seconds and promoted to milliseconds before formatting.
 */
function formatTimestamp(timestamp: number): string {
	const looksLikeMilliseconds = timestamp > 1_000_000_000_000;
	const millis = looksLikeMilliseconds ? timestamp : timestamp * 1000;
	return new Date(millis).toLocaleString();
}
</script>
<svelte:head>
@@ -175,16 +237,108 @@
<p class="mt-0.5 text-sm text-gray-500">Configure TrueRef embedding and indexing options</p>
</div>
<!-- Embedding Provider Card -->
<div class="mb-4 grid gap-4 lg:grid-cols-[1.2fr_0.8fr]">
<div class="rounded-xl border border-gray-200 bg-white p-6">
<h2 class="mb-1 text-base font-semibold text-gray-900">Current Active Profile</h2>
<p class="mb-4 text-sm text-gray-500">
This is the profile used for semantic indexing and retrieval right now.
</p>
{#if activeProfile}
<div class="grid gap-4 md:grid-cols-2">
<div class="space-y-4">
<div>
<p class="text-lg font-semibold text-gray-900">{activeProfile.title}</p>
<p class="mt-1 text-sm text-gray-500">Profile ID: {activeProfile.id}</p>
</div>
<dl class="rounded-lg border border-gray-200 bg-gray-50 p-4 text-sm">
<div class="grid grid-cols-[110px_1fr] gap-x-4 gap-y-1 border-b border-gray-200 pb-3">
<dt class="font-medium text-gray-500">Provider</dt>
<dd class="font-semibold text-gray-900">{activeProfile.providerKind}</dd>
<dt class="font-medium text-gray-500">Model</dt>
<dd class="break-all font-semibold text-gray-900">{activeProfile.model}</dd>
<dt class="font-medium text-gray-500">Dimensions</dt>
<dd class="font-semibold text-gray-900">{activeProfile.dimensions}</dd>
</div>
<div class="grid grid-cols-[110px_1fr] gap-x-4 gap-y-2 pt-3">
<dt class="text-gray-500">Enabled</dt>
<dd class="font-medium text-gray-800">{activeProfile.enabled ? 'Yes' : 'No'}</dd>
<dt class="text-gray-500">Default</dt>
<dd class="font-medium text-gray-800">{activeProfile.isDefault ? 'Yes' : 'No'}</dd>
<dt class="text-gray-500">Updated</dt>
<dd class="font-medium text-gray-800">{formatTimestamp(activeProfile.updatedAt)}</dd>
</div>
</dl>
</div>
<div class="rounded-lg border border-gray-200 bg-gray-50 p-4">
<p class="text-sm font-medium text-gray-800">Provider configuration</p>
<p class="mb-3 mt-1 text-sm text-gray-500">
These are the provider-specific settings currently saved for the active profile.
</p>
{#if activeConfigEntries.length > 0}
<ul class="space-y-2 text-sm">
{#each activeConfigEntries as entry (entry.key)}
<li class="flex items-start justify-between gap-4 border-b border-gray-200 pb-2 last:border-b-0 last:pb-0">
<span class="font-medium text-gray-600">{entry.key}</span>
<span class={entry.redacted ? 'text-gray-500' : 'text-gray-800'}>{entry.value}</span>
</li>
{/each}
</ul>
{:else}
<p class="text-sm text-gray-500">
No provider-specific configuration is stored for this profile.
</p>
<p class="mt-2 text-sm text-gray-500">
For <span class="font-medium text-gray-700">OpenAI-compatible</span> profiles, edit the
settings in the <span class="font-medium text-gray-700">Embedding Provider</span> form
below. The built-in <span class="font-medium text-gray-700">Local Model</span> profile
does not currently expose extra configurable fields.
</p>
{/if}
</div>
</div>
{:else}
<div class="rounded-lg border border-amber-200 bg-amber-50 p-4 text-sm text-amber-800">
Embeddings are currently disabled. Keyword search remains available, but no embedding profile is active.
</div>
{/if}
</div>
<div class="rounded-xl border border-gray-200 bg-white p-6">
<h2 class="mb-1 text-base font-semibold text-gray-900">Profile Inventory</h2>
<p class="mb-4 text-sm text-gray-500">Profiles stored in the database and available for activation.</p>
<div class="grid grid-cols-2 gap-3">
<StatBadge label="Profiles" value={String(currentSettings.profiles.length)} />
<StatBadge label="Active" value={activeProfile ? '1' : '0'} />
</div>
<div class="mt-4 space-y-2">
{#each currentSettings.profiles as profile (profile.id)}
<div class="rounded-lg border border-gray-200 px-3 py-2 text-sm">
<div class="flex items-center justify-between gap-3">
<div>
<p class="font-medium text-gray-900">{profile.title}</p>
<p class="text-gray-500">{profile.id}</p>
</div>
{#if profile.id === currentSettings.activeProfileId}
<span class="rounded-full bg-blue-50 px-2 py-0.5 text-xs font-medium text-blue-700">Active</span>
{/if}
</div>
</div>
{/each}
</div>
</div>
</div>
<div class="rounded-xl border border-gray-200 bg-white p-6">
<h2 class="mb-1 text-base font-semibold text-gray-900">Embedding Provider</h2>
<p class="mb-4 text-sm text-gray-500">
Embeddings enable semantic search. Without them, only keyword search (FTS5) is used.
</p>
{#if loading}
<p class="text-sm text-gray-400">Loading current configuration…</p>
{:else}
<form class="space-y-4" onsubmit={handleSubmit}>
<!-- Provider selector -->
<div class="mb-4 flex gap-2">
@@ -314,9 +468,7 @@
<div class="rounded-lg border border-gray-200 bg-gray-50 p-4 text-sm">
<p class="font-medium text-gray-800">Local ONNX model via @xenova/transformers</p>
<p class="mt-1 text-gray-500">Model: Xenova/all-MiniLM-L6-v2 · 384 dimensions</p>
{#if localAvailable === null}
<p class="mt-2 text-gray-400">Checking availability…</p>
{:else if localAvailable}
{#if getInitialLocalProviderAvailability()}
<p class="mt-2 text-green-600">@xenova/transformers is installed and ready.</p>
{:else}
<p class="mt-2 text-amber-700">
@@ -382,7 +534,6 @@
</button>
</div>
</form>
{/if}
</div>
<!-- About card -->

View File

@@ -0,0 +1,103 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';
import Database from 'better-sqlite3';
import { readFileSync } from 'node:fs';
import { join } from 'node:path';
let db: Database.Database;
vi.mock('$lib/server/db/client.js', () => ({
getClient: () => db
}));
vi.mock('$lib/server/embeddings/local.provider.js', () => ({
LocalEmbeddingProvider: class {
async isAvailable() {
return true;
}
}
}));
import { load } from './+page.server.js';
/**
 * Build an in-memory SQLite database with the migration chain and the FTS5
 * DDL applied, mirroring the schema the app's migration runner produces.
 */
function createTestDb(): Database.Database {
	const client = new Database(':memory:');
	client.pragma('foreign_keys = ON');

	const migrationsFolder = join(import.meta.dirname, '../../lib/server/db/migrations');
	const migrationFiles = [
		'0000_large_master_chief.sql',
		'0001_quick_nighthawk.sql',
		'0002_silky_stellaris.sql'
	];
	for (const migration of migrationFiles) {
		// Drizzle separates statements with an explicit breakpoint marker; run
		// each statement individually so multi-statement files apply cleanly.
		const sql = readFileSync(join(migrationsFolder, migration), 'utf-8');
		for (const statement of sql.split('--> statement-breakpoint')) {
			const trimmed = statement.trim();
			if (trimmed) client.exec(trimmed);
		}
	}

	// The FTS5 DDL lives outside the generated migration chain.
	const ftsFile = join(import.meta.dirname, '../../lib/server/db/fts.sql');
	client.exec(readFileSync(ftsFile, 'utf-8'));
	return client;
}
describe('/settings page server load', () => {
	beforeEach(() => {
		// Rebuild the in-memory database before each test; the module-level
		// `db` is what the mocked getClient() hands to the settings service.
		db = createTestDb();
	});
	it('returns the active profile and local provider availability', async () => {
		// Seed one OpenAI-compatible profile via raw SQL. Positional binds
		// follow the column list exactly: id, provider_kind, title, enabled,
		// is_default, model, dimensions, config (JSON), created_at, updated_at.
		db.prepare(
			`INSERT INTO embedding_profiles
(id, provider_kind, title, enabled, is_default, model, dimensions, config, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
		).run(
			'openai-default',
			'openai-compatible',
			'OpenAI-compatible',
			1,
			1,
			'text-embedding-3-small',
			1536,
			JSON.stringify({
				baseUrl: 'https://api.openai.com/v1',
				apiKey: 'sk-test',
				model: 'text-embedding-3-small'
			}),
			1710000000,
			1710000000
		);
		// Demote the built-in local profile so the seeded profile wins default
		// resolution. NOTE(review): assumes migrations insert a 'local-default'
		// row — confirm against the migration files.
		db.prepare('UPDATE embedding_profiles SET is_default = 0 WHERE id = ?').run('local-default');
		// load() is invoked with an empty event; this loader reads no args.
		const result = (await load({} as never)) as {
			localProviderAvailable: boolean;
			settings: {
				activeProfileId: string | null;
				activeProfile: {
					config: Record<string, unknown>;
					configEntries: Array<{ key: string; value: string; redacted: boolean }>;
				} | null;
			};
		};
		// The local provider is mocked (vi.mock above) to report available.
		expect(result.localProviderAvailable).toBe(true);
		expect(result.settings.activeProfileId).toBe('openai-default');
		expect(result.settings.activeProfile).toMatchObject({
			id: 'openai-default',
			providerKind: 'openai-compatible'
		});
		// The DTO must strip the apiKey from the plain config object…
		expect(result.settings.activeProfile?.config).toEqual({
			baseUrl: 'https://api.openai.com/v1',
			model: 'text-embedding-3-small'
		});
		// …and surface it only as a redacted config entry.
		expect(result.settings.activeProfile?.configEntries).toEqual(
			expect.arrayContaining([
				expect.objectContaining({ key: 'apiKey', value: '[redacted]', redacted: true })
			])
		);
	});
});

View File

@@ -1,632 +0,0 @@
> trueref@0.0.1 test:unit
> vitest
 DEV  v4.1.0 /home/moze/Sources/trueref
19:10:26 [vite] (client) Re-optimizing dependencies because lockfile has changed
  server  src/lib/server/embeddings/embedding.service.test.ts (0 test)
✓  server  src/lib/server/parser/code.parser.test.ts (20 tests) 22ms
✓  server  src/lib/server/services/version.service.test.ts (19 tests) 37ms
✓  server  src/lib/server/services/repository.service.test.ts (37 tests) 57ms
stderr | src/lib/server/crawler/local.crawler.test.ts > LocalCrawler.crawl() — config file detection > gracefully handles a malformed config file
[LocalCrawler] Failed to parse config file: /tmp/trueref-test-ptITIP/trueref.json
✓  server  src/lib/server/config/config-parser.test.ts (50 tests) 21ms
stderr | src/lib/server/pipeline/indexing.pipeline.test.ts > IndexingPipeline > marks job as failed and repo as error when pipeline throws
[IndexingPipeline] Job c44d7e22-6127-49e7-82b7-eb724726c888 failed: crawl failed
stderr | src/lib/server/pipeline/indexing.pipeline.test.ts
[JobQueue] No pipeline configured — cannot process jobs.
stderr | src/lib/server/pipeline/indexing.pipeline.test.ts
[JobQueue] No pipeline configured — cannot process jobs.
stderr | src/lib/server/pipeline/indexing.pipeline.test.ts
[JobQueue] No pipeline configured — cannot process jobs.
✓  server  src/lib/server/search/search.service.test.ts (43 tests) 43ms
✓  server  src/lib/server/pipeline/indexing.pipeline.test.ts (20 tests) 42ms
✓  server  src/lib/server/crawler/gitignore-parser.test.ts (29 tests) 11ms
✓  server  src/lib/server/crawler/github-tags.test.ts (10 tests) 9ms
✓  server  src/routes/api/v1/api-contract.integration.test.ts (4 tests) 48ms
  server  src/lib/server/db/schema.test.ts (19 tests | 19 failed) 50ms
 × inserts and retrieves a repository 12ms
 × allows nullable optional fields 3ms
 × supports all state enum values 2ms
 × inserts a version linked to a repository 4ms
 × cascades delete when parent repository is deleted 2ms
 × inserts a document 1ms
 × cascades delete when repository is deleted 2ms
 × inserts a code snippet 2ms
 × inserts an info snippet 2ms
 × cascades delete when document is deleted 2ms
 × stores a Float32Array embedding as blob 2ms
 × cascades delete when snippet is deleted 2ms
 × creates a job with default queued status 2ms
 × supports all status enum values 2ms
 × stores JSON array fields correctly 2ms
 × stores and retrieves key-value settings 2ms
 × FTS table exists and is queryable 1ms
 × insert trigger keeps FTS in sync 2ms
 × delete trigger removes entry from FTS 2ms
  server  src/lib/server/search/hybrid.search.service.test.ts (33 tests | 16 failed) 52ms
✓ returns 1.0 for identical vectors 2ms
✓ returns 0.0 for orthogonal vectors 0ms
✓ returns -1.0 for opposite vectors 0ms
✓ returns 0 for zero-magnitude vector 0ms
✓ throws when dimensions do not match 1ms
✓ computes correct similarity for non-trivial vectors 0ms
✓ returns empty array for empty inputs 1ms
✓ fuses a single list preserving order 1ms
✓ deduplicates items appearing in multiple lists 0ms
✓ boosts items appearing in multiple lists 0ms
✓ assigns higher rrfScore to higher-ranked items 0ms
✓ handles three lists correctly 0ms
✓ produces positive rrfScores 0ms
 × returns empty array when no embeddings exist 10ms
 × returns results sorted by descending cosine similarity 2ms
 × respects the limit parameter 4ms
 × only returns snippets from the specified repository 2ms
 × handles embeddings with negative values 1ms
✓ returns FTS5 results when embeddingProvider is null 2ms
✓ returns FTS5 results when alpha = 0 1ms
✓ returns empty array when FTS5 query is blank and no provider 1ms
✓ falls back to FTS5 when noop provider returns empty embeddings 2ms
 × returns results when hybrid mode is active (alpha = 0.5) 1ms
 × deduplicates snippets appearing in both FTS5 and vector results 1ms
 × respects the limit option 1ms
 × returns vector-ranked results when alpha = 1 1ms
 × results include snippet and repository metadata 1ms
 × all results belong to the requested repository 1ms
 × filters by snippet type when provided 1ms
 × uses alpha = 0.5 when not specified 1ms
 × filters by versionId — excludes snippets from other versions 3ms
 × searchMode=keyword never calls provider.embed() 3ms
 × searchMode=semantic uses only vector search 2ms
✓  server  src/lib/server/api/formatters.test.ts (20 tests) 9ms
✓  server  src/lib/server/pipeline/diff.test.ts (9 tests) 8ms
✓  server  src/lib/server/api/library-id.test.ts (8 tests) 6ms
✓  server  src/lib/server/api/token-budget.test.ts (7 tests) 6ms
✓  server  src/lib/server/parser/markdown.parser.test.ts (14 tests) 9ms
✓  server  src/lib/vitest-examples/greet.spec.ts (1 test) 3ms
✓  server  src/lib/server/crawler/local.crawler.test.ts (50 tests) 658ms
✓  server  src/mcp/index.test.ts (7 tests) 985ms
✓  client (chromium)  src/lib/vitest-examples/Welcome.svelte.spec.ts (1 test) 9ms
stderr | src/lib/server/crawler/github.crawler.test.ts > crawl() > skips files that fail to download without throwing
[GitHubCrawler] Could not download: src/index.ts — skipping.
✓  server  src/lib/server/crawler/github.crawler.test.ts (50 tests) 6082ms
✓ retries on failure and returns eventual success  3003ms
✓ throws after exhausting all attempts  3003ms
⎯⎯⎯⎯⎯⎯ Failed Suites 1 ⎯⎯⎯⎯⎯⎯⎯
 FAIL   server  src/lib/server/embeddings/embedding.service.test.ts [ src/lib/server/embeddings/embedding.service.test.ts ]
Error: Transform failed with 1 error:
/home/moze/Sources/trueref/src/lib/server/embeddings/embedding.service.test.ts:408:2: ERROR: "await" can only be used inside an "async" function
Plugin: vite:esbuild
File: /home/moze/Sources/trueref/src/lib/server/embeddings/embedding.service.test.ts:408:2

"await" can only be used inside an "async" function
406 | });
407 |
408 | await service.embedSnippets([snippetId]);
| ^
409 |
410 | const retrieved = service.getEmbedding(snippetId);

  failureErrorWithLog node_modules/vite/node_modules/esbuild/lib/main.js:1748:15
  node_modules/vite/node_modules/esbuild/lib/main.js:1017:50
  responseCallbacks.<computed> node_modules/vite/node_modules/esbuild/lib/main.js:884:9
  handleIncomingPacket node_modules/vite/node_modules/esbuild/lib/main.js:939:12
  Socket.readFromStdout node_modules/vite/node_modules/esbuild/lib/main.js:862:7
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[1/36]⎯
⎯⎯⎯⎯⎯⎯ Failed Tests 35 ⎯⎯⎯⎯⎯⎯⎯
 FAIL   server  src/lib/server/db/schema.test.ts > repositories table > inserts and retrieves a repository
 FAIL   server  src/lib/server/db/schema.test.ts > repositories table > allows nullable optional fields
 FAIL   server  src/lib/server/db/schema.test.ts > repositories table > supports all state enum values
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:63:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:63:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[2/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > repository_versions table > inserts a version linked to a repository
 FAIL   server  src/lib/server/db/schema.test.ts > repository_versions table > cascades delete when parent repository is deleted
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:109:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:109:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[3/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > documents table > inserts a document
 FAIL   server  src/lib/server/db/schema.test.ts > documents table > cascades delete when repository is deleted
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:151:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:151:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[4/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > snippets table > inserts a code snippet
 FAIL   server  src/lib/server/db/schema.test.ts > snippets table > inserts an info snippet
 FAIL   server  src/lib/server/db/schema.test.ts > snippets table > cascades delete when document is deleted
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:195:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:195:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[5/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > snippet_embeddings table > stores a Float32Array embedding as blob
 FAIL   server  src/lib/server/db/schema.test.ts > snippet_embeddings table > cascades delete when snippet is deleted
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:271:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:271:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[6/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > indexing_jobs table > creates a job with default queued status
 FAIL   server  src/lib/server/db/schema.test.ts > indexing_jobs table > supports all status enum values
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:350:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:350:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[7/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > repository_configs table > stores JSON array fields correctly
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:391:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:391:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[8/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > settings table > stores and retrieves key-value settings
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:422:13
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:422:13
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[9/36]⎯
 FAIL   server  src/lib/server/db/schema.test.ts > FTS5 virtual table (snippets_fts) > FTS table exists and is queryable
 FAIL   server  src/lib/server/db/schema.test.ts > FTS5 virtual table (snippets_fts) > insert trigger keeps FTS in sync
 FAIL   server  src/lib/server/db/schema.test.ts > FTS5 virtual table (snippets_fts) > delete trigger removes entry from FTS
DrizzleError: Failed to run the query '
INSERT INTO `__new_snippet_embeddings`("snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at") SELECT "snippet_id", "profile_id", "model", "dimensions", "embedding", "created_at" FROM `snippet_embeddings`;'
  BetterSQLiteSession.run node_modules/src/sqlite-core/session.ts:271:9
  SQLiteSyncDialect.migrate node_modules/src/sqlite-core/dialect.ts:864:14
  migrate node_modules/src/better-sqlite3/migrator.ts:10:12
  createTestDb src/lib/server/db/schema.test.ts:32:2
 30| // Run migrations from the generated migration folder.
 31| const migrationsFolder = join(import.meta.dirname, 'migrations');
 32| migrate(db, { migrationsFolder });
 | ^
 33|
 34| // Apply FTS5 DDL using exec() which handles multi-statement SQL with…
  src/lib/server/db/schema.test.ts:442:21
Caused by: SqliteError: no such column: "profile_id" - should this be a string literal in single-quotes?
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  BetterSQLiteSession.prepareQuery node_modules/drizzle-orm/better-sqlite3/session.js:23:30
  BetterSQLiteSession.prepareOneTimeQuery node_modules/drizzle-orm/sqlite-core/session.js:141:17
  BetterSQLiteSession.run node_modules/drizzle-orm/sqlite-core/session.js:154:19
  SQLiteSyncDialect.migrate node_modules/drizzle-orm/sqlite-core/dialect.js:604:21
  migrate node_modules/drizzle-orm/better-sqlite3/migrator.js:4:14
  createTestDb src/lib/server/db/schema.test.ts:32:2
  src/lib/server/db/schema.test.ts:442:21
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
Serialized Error: { code: 'SQLITE_ERROR' }
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[10/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > VectorSearch > returns empty array when no embeddings exist
SqliteError: no such column: se.profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  VectorSearch.vectorSearch src/lib/server/search/vector.search.ts:100:24
 98| }
 99|
100| const rows = this.db.prepare<unknown[], RawEmbeddingRow>(sql).all(..…
 | ^
101|
102| const scored: VectorSearchResult[] = rows.map((row) => {
  src/lib/server/search/hybrid.search.service.test.ts:289:22
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[11/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > VectorSearch > returns results sorted by descending cosine similarity
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:302:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[12/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > VectorSearch > respects the limit parameter
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:321:4
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[13/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > VectorSearch > only returns snippets from the specified repository
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:340:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[14/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > VectorSearch > handles embeddings with negative values
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:352:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[15/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > returns results when hybrid mode is active (alpha = 0.5)
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:430:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[16/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > deduplicates snippets appearing in both FTS5 and vector results
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:449:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[17/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > respects the limit option
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:471:4
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[18/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > returns vector-ranked results when alpha = 1
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:503:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[19/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > results include snippet and repository metadata
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:528:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[20/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > all results belong to the requested repository
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:556:4
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[21/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > filters by snippet type when provided
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:591:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[22/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > uses alpha = 0.5 when not specified
SqliteError: table snippet_embeddings has no column named profile_id
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  seedEmbedding src/lib/server/search/hybrid.search.service.test.ts:112:4
110| const f32 = new Float32Array(values);
111| client
112| .prepare(
 | ^
113| `INSERT OR REPLACE INTO snippet_embeddings
114| (snippet_id, profile_id, model, dimensions, embedding, create…
  src/lib/server/search/hybrid.search.service.test.ts:616:3
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[23/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > filters by versionId — excludes snippets from other versions
SqliteError: no such table: embedding_profiles
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  src/lib/server/search/hybrid.search.service.test.ts:647:5
645| // Create embedding profile
646| client
647| .prepare(
 | ^
648| `INSERT INTO embedding_profiles (id, provider_kind, title, enabled…
649| VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[24/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > searchMode=keyword never calls provider.embed()
SqliteError: table snippets_fts has no column named id
  Database.exec node_modules/better-sqlite3/lib/methods/wrappers.js:9:14
  src/lib/server/search/hybrid.search.service.test.ts:734:10
732| });
733|
734| client.exec(
 | ^
735| `INSERT INTO snippets_fts (id, repository_id, version_id, title, br…
736| VALUES ('${snippetId}', '${repoId}', NULL, NULL, NULL, 'keyword…
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[25/36]⎯
 FAIL   server  src/lib/server/search/hybrid.search.service.test.ts > HybridSearchService > searchMode=semantic uses only vector search
SqliteError: no such table: embedding_profiles
  Database.prepare node_modules/better-sqlite3/lib/methods/wrappers.js:5:21
  src/lib/server/search/hybrid.search.service.test.ts:772:5
770| // Create profile
771| client
772| .prepare(
 | ^
773| `INSERT INTO embedding_profiles (id, provider_kind, title, enabled…
774| VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[26/36]⎯
 Test Files  3 failed | 19 passed (22)
 Tests  35 failed | 416 passed (451)
 Start at  19:10:26
 Duration  6.93s (transform 7.37s, setup 0ms, import 9.29s, tests 8.17s, environment 11ms)
 FAIL  Tests failed. Watching for file changes...
press h to show help, press q to quit
Cancelling test run. Press CTRL+c again to exit forcefully.