mirror of
https://github.com/codeaashu/claude-code.git
synced 2026-04-08 22:28:48 +03:00
claude-code
This commit is contained in:
193
web/lib/search/client-search.ts
Normal file
193
web/lib/search/client-search.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
import type { Conversation, SearchFilters, SearchResult, SearchResultMatch } from "@/lib/types";
|
||||
import { extractTextContent } from "@/lib/utils";
|
||||
import { tokenize, excerpt, highlight } from "./highlighter";
|
||||
|
||||
/**
|
||||
* Score a text against a set of query tokens.
|
||||
* Returns 0 if no tokens match, otherwise a positive score.
|
||||
*/
|
||||
function scoreText(text: string, tokens: string[]): number {
|
||||
if (!text || tokens.length === 0) return 0;
|
||||
const lower = text.toLowerCase();
|
||||
let score = 0;
|
||||
|
||||
for (const token of tokens) {
|
||||
const idx = lower.indexOf(token);
|
||||
if (idx === -1) continue;
|
||||
|
||||
// Base score per token
|
||||
score += 1;
|
||||
|
||||
// Bonus for word boundary match
|
||||
const before = idx === 0 || /\W/.test(lower[idx - 1]);
|
||||
const after = idx + token.length >= lower.length || /\W/.test(lower[idx + token.length]);
|
||||
if (before && after) score += 0.5;
|
||||
|
||||
// Bonus for more occurrences (capped)
|
||||
const count = (lower.match(new RegExp(token.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), "g")) ?? []).length;
|
||||
score += Math.min(count - 1, 3) * 0.2;
|
||||
}
|
||||
|
||||
// Penalty if not all tokens match
|
||||
const matchedTokens = tokens.filter((t) => lower.includes(t));
|
||||
if (matchedTokens.length < tokens.length) {
|
||||
score *= matchedTokens.length / tokens.length;
|
||||
}
|
||||
|
||||
return score;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract plain-text content from a message for indexing.
|
||||
*/
|
||||
function messageText(content: Conversation["messages"][number]["content"]): string {
|
||||
if (typeof content === "string") return content;
|
||||
if (!Array.isArray(content)) return "";
|
||||
|
||||
return content
|
||||
.map((block) => {
|
||||
if (block.type === "text") return block.text;
|
||||
if (block.type === "tool_use") return `${block.name} ${JSON.stringify(block.input)}`;
|
||||
if (block.type === "tool_result") {
|
||||
return typeof block.content === "string"
|
||||
? block.content
|
||||
: extractTextContent(block.content);
|
||||
}
|
||||
return "";
|
||||
})
|
||||
.join(" ");
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a client-side full-text search over an array of conversations.
|
||||
* Returns results sorted by relevance (highest score first).
|
||||
*/
|
||||
export function clientSearch(
|
||||
conversations: Conversation[],
|
||||
query: string,
|
||||
filters: SearchFilters = {}
|
||||
): SearchResult[] {
|
||||
const tokens = tokenize(query);
|
||||
if (tokens.length === 0 && !hasActiveFilters(filters)) return [];
|
||||
|
||||
const results: SearchResult[] = [];
|
||||
const now = Date.now();
|
||||
|
||||
for (const conv of conversations) {
|
||||
// --- Date filter ---
|
||||
if (filters.dateFrom && conv.updatedAt < filters.dateFrom) continue;
|
||||
if (filters.dateTo && conv.updatedAt > filters.dateTo + 86_400_000) continue;
|
||||
|
||||
// --- Conversation filter ---
|
||||
if (filters.conversationId && conv.id !== filters.conversationId) continue;
|
||||
|
||||
// --- Model filter ---
|
||||
if (filters.model && conv.model !== filters.model) continue;
|
||||
|
||||
// --- Tag filter ---
|
||||
if (filters.tagIds && filters.tagIds.length > 0) {
|
||||
const convTags = new Set(conv.tags ?? []);
|
||||
if (!filters.tagIds.some((tid) => convTags.has(tid))) continue;
|
||||
}
|
||||
|
||||
const matches: SearchResultMatch[] = [];
|
||||
let titleScore = 0;
|
||||
|
||||
// Score the conversation title
|
||||
if (tokens.length > 0) {
|
||||
titleScore = scoreText(conv.title, tokens) * 1.5; // title matches weight more
|
||||
}
|
||||
|
||||
for (const msg of conv.messages) {
|
||||
// --- Role filter ---
|
||||
if (filters.role && msg.role !== filters.role) continue;
|
||||
|
||||
// --- Content type filter ---
|
||||
if (filters.contentType) {
|
||||
const hasType = matchesContentType(msg.content, filters.contentType);
|
||||
if (!hasType) continue;
|
||||
}
|
||||
|
||||
const text = messageText(msg.content);
|
||||
if (!text) continue;
|
||||
|
||||
let msgScore = tokens.length > 0 ? scoreText(text, tokens) : 1;
|
||||
if (msgScore === 0) continue;
|
||||
|
||||
const ex = excerpt(text, query);
|
||||
const hl = highlight(ex, query);
|
||||
|
||||
matches.push({
|
||||
messageId: msg.id,
|
||||
role: msg.role,
|
||||
excerpt: ex,
|
||||
highlighted: hl,
|
||||
score: msgScore,
|
||||
});
|
||||
}
|
||||
|
||||
if (tokens.length === 0) {
|
||||
// Filter-only mode: include conversation with a synthetic match on the title
|
||||
results.push({
|
||||
conversationId: conv.id,
|
||||
conversationTitle: conv.title,
|
||||
conversationDate: conv.updatedAt,
|
||||
conversationModel: conv.model,
|
||||
matches: matches.length > 0 ? matches.slice(0, 5) : [],
|
||||
totalScore: 1,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
if (matches.length === 0 && titleScore === 0) continue;
|
||||
|
||||
const totalScore =
|
||||
titleScore + matches.reduce((sum, m) => sum + m.score, 0);
|
||||
|
||||
// Sort matches by score descending, keep top 5
|
||||
matches.sort((a, b) => b.score - a.score);
|
||||
|
||||
results.push({
|
||||
conversationId: conv.id,
|
||||
conversationTitle: conv.title,
|
||||
conversationDate: conv.updatedAt,
|
||||
conversationModel: conv.model,
|
||||
matches: matches.slice(0, 5),
|
||||
totalScore,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort results by total score descending
|
||||
results.sort((a, b) => b.totalScore - a.totalScore);
|
||||
return results;
|
||||
}
|
||||
|
||||
function hasActiveFilters(filters: SearchFilters): boolean {
|
||||
return !!(
|
||||
filters.dateFrom ||
|
||||
filters.dateTo ||
|
||||
filters.role ||
|
||||
filters.conversationId ||
|
||||
filters.contentType ||
|
||||
filters.model ||
|
||||
(filters.tagIds && filters.tagIds.length > 0)
|
||||
);
|
||||
}
|
||||
|
||||
function matchesContentType(
|
||||
content: Conversation["messages"][number]["content"],
|
||||
type: NonNullable<SearchFilters["contentType"]>
|
||||
): boolean {
|
||||
if (typeof content === "string") return type === "text";
|
||||
if (!Array.isArray(content)) return false;
|
||||
|
||||
return content.some((block) => {
|
||||
if (type === "text" && block.type === "text") return true;
|
||||
if (type === "tool_use" && block.type === "tool_use") return true;
|
||||
if (type === "file" && block.type === "tool_use" && block.name?.includes("file")) return true;
|
||||
if (type === "code" && block.type === "text") {
|
||||
return block.text.includes("```") || block.text.includes(" ");
|
||||
}
|
||||
return false;
|
||||
});
|
||||
}
|
||||
70
web/lib/search/highlighter.ts
Normal file
70
web/lib/search/highlighter.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
/**
|
||||
* Highlights occurrences of search terms in text by wrapping them in <mark> tags.
|
||||
* Returns an HTML string safe for use with dangerouslySetInnerHTML.
|
||||
*/
|
||||
export function highlight(text: string, query: string): string {
|
||||
if (!query.trim()) return escapeHtml(text);
|
||||
|
||||
const terms = tokenize(query);
|
||||
if (terms.length === 0) return escapeHtml(text);
|
||||
|
||||
// Build a regex that matches any of the terms (case-insensitive)
|
||||
const pattern = terms
|
||||
.map((t) => t.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"))
|
||||
.join("|");
|
||||
const regex = new RegExp(`(${pattern})`, "gi");
|
||||
|
||||
return escapeHtml(text).replace(
|
||||
// Re-run on escaped HTML — we need to match original terms
|
||||
// So instead: split on matches then reassemble
|
||||
regex,
|
||||
(match) => `<mark class="search-highlight">${match}</mark>`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a short excerpt (up to maxLength chars) centred around the first match.
|
||||
*/
|
||||
export function excerpt(text: string, query: string, maxLength = 160): string {
|
||||
if (!query.trim()) return text.slice(0, maxLength);
|
||||
|
||||
const terms = tokenize(query);
|
||||
if (terms.length === 0) return text.slice(0, maxLength);
|
||||
|
||||
const lowerText = text.toLowerCase();
|
||||
let matchIndex = -1;
|
||||
|
||||
for (const term of terms) {
|
||||
const idx = lowerText.indexOf(term.toLowerCase());
|
||||
if (idx !== -1) {
|
||||
matchIndex = idx;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (matchIndex === -1) return text.slice(0, maxLength);
|
||||
|
||||
const half = Math.floor(maxLength / 2);
|
||||
const start = Math.max(0, matchIndex - half);
|
||||
const end = Math.min(text.length, start + maxLength);
|
||||
const slice = text.slice(start, end);
|
||||
|
||||
return (start > 0 ? "…" : "") + slice + (end < text.length ? "…" : "");
|
||||
}
|
||||
|
||||
/** Tokenise a query string into non-empty lowercase words. */
|
||||
export function tokenize(query: string): string[] {
|
||||
return query
|
||||
.trim()
|
||||
.split(/\s+/)
|
||||
.filter((t) => t.length > 0);
|
||||
}
|
||||
|
||||
function escapeHtml(text: string): string {
|
||||
return text
|
||||
.replace(/&/g, "&")
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">")
|
||||
.replace(/"/g, """)
|
||||
.replace(/'/g, "'");
|
||||
}
|
||||
36
web/lib/search/search-api.ts
Normal file
36
web/lib/search/search-api.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
/**
|
||||
* Server-side search API client.
|
||||
*
|
||||
* Currently a stub — the app uses client-side search via client-search.ts.
|
||||
* When a backend search endpoint is available, swap clientSearch calls
|
||||
* in GlobalSearch.tsx for apiSearch.
|
||||
*/
|
||||
import type { SearchFilters, SearchResult } from "@/lib/types";
|
||||
|
||||
export interface SearchApiResponse {
  /** Matching results for the requested page. */
  results: SearchResult[];
  /** Total match count — presumably across all pages, not just this one; confirm with backend. */
  total: number;
  took: number; // ms — server-side processing time
}
|
||||
|
||||
export async function apiSearch(
|
||||
query: string,
|
||||
filters: SearchFilters = {},
|
||||
page = 0,
|
||||
pageSize = 20,
|
||||
apiUrl = ""
|
||||
): Promise<SearchApiResponse> {
|
||||
const params = new URLSearchParams({ q: query, page: String(page), pageSize: String(pageSize) });
|
||||
|
||||
if (filters.dateFrom) params.set("dateFrom", String(filters.dateFrom));
|
||||
if (filters.dateTo) params.set("dateTo", String(filters.dateTo));
|
||||
if (filters.role) params.set("role", filters.role);
|
||||
if (filters.conversationId) params.set("conversationId", filters.conversationId);
|
||||
if (filters.contentType) params.set("contentType", filters.contentType);
|
||||
if (filters.model) params.set("model", filters.model);
|
||||
if (filters.tagIds?.length) params.set("tagIds", filters.tagIds.join(","));
|
||||
|
||||
const res = await fetch(`${apiUrl}/api/search?${params.toString()}`);
|
||||
if (!res.ok) throw new Error(`Search API error: ${res.status}`);
|
||||
return res.json() as Promise<SearchApiResponse>;
|
||||
}
|
||||
Reference in New Issue
Block a user