diff --git a/applications/web/sources/Application.tsx b/applications/web/sources/Application.tsx
index efd0ef2..47f6772 100644
--- a/applications/web/sources/Application.tsx
+++ b/applications/web/sources/Application.tsx
@@ -6,6 +6,7 @@ import { SignInPage } from './pages/SignInPage.tsx';
import { HomePage } from './pages/HomePage.tsx';
import { CrawlersPage } from './pages/CrawlersPage.tsx';
import { CrawlerNewPage } from './pages/CrawlerNewPage.tsx';
+import { CrawlerDetailPage } from './pages/CrawlerDetailPage.tsx';
import { SchedulersPage } from './pages/SchedulersPage.tsx';
import { SchedulerDetailPage } from './pages/SchedulerDetailPage.tsx';
import { AuthenticationCallbackPage } from './pages/AuthenticationCallbackPage.tsx';
@@ -70,6 +71,14 @@ function ApplicationRoutes() {
}
/>
+          <Route
+            path="/crawlers/:id"
+            element={
+              <ProtectedRoute>
+                <CrawlerDetailPage />
+              </ProtectedRoute>
+            }
+          />
onChange: (value: string) => void;
disabled?: boolean;
+ showDefaultTemplate?: boolean;
}
-export function CodeEditorPanel({ value, onChange, disabled }: CodeEditorPanelProperties) {
- const displayValue = value.length > 0 ? value : DEFAULT_CODE;
+export function CodeEditorPanel({ value, onChange, disabled, showDefaultTemplate = true }: CodeEditorPanelProperties) {
+ const displayValue = showDefaultTemplate && value.length === 0 ? DEFAULT_CODE : value;
const isOverLimit = displayValue.length > MAX_CODE_LENGTH;
return (
diff --git a/applications/web/sources/contexts/AuthenticationContext.test.tsx b/applications/web/sources/contexts/AuthenticationContext.test.tsx
index 5c4c2aa..6efe5b9 100644
--- a/applications/web/sources/contexts/AuthenticationContext.test.tsx
+++ b/applications/web/sources/contexts/AuthenticationContext.test.tsx
@@ -61,7 +61,7 @@ describe('AuthenticationContext', () => {
expect(result.current.isLoading).toBe(false);
});
- expect(result.current.user).toBeNull();
+ expect(result.current.user).toBeUndefined();
expect(result.current.isAuthenticated).toBe(false);
});
@@ -105,7 +105,7 @@ describe('AuthenticationContext', () => {
expect(result.current.isLoading).toBe(false);
});
- expect(result.current.user).toBeNull();
+ expect(result.current.user).toBeUndefined();
expect(result.current.isAuthenticated).toBe(false);
expect(localStorage.getItem(storageKey)).toBeNull();
});
@@ -122,7 +122,7 @@ describe('AuthenticationContext', () => {
expect(result.current.isLoading).toBe(false);
});
- expect(result.current.user).toBeNull();
+ expect(result.current.user).toBeUndefined();
expect(localStorage.getItem(storageKey)).toBeNull();
});
@@ -173,7 +173,7 @@ describe('AuthenticationContext', () => {
result.current.logout();
await vi.waitFor(() => {
- expect(result.current.user).toBeNull();
+ expect(result.current.user).toBeUndefined();
expect(result.current.isAuthenticated).toBe(false);
});
expect(localStorage.getItem(storageKey)).toBeNull();
diff --git a/applications/web/sources/hooks/use-authentication.test.tsx b/applications/web/sources/hooks/use-authentication.test.tsx
index 50e622d..ff1b258 100644
--- a/applications/web/sources/hooks/use-authentication.test.tsx
+++ b/applications/web/sources/hooks/use-authentication.test.tsx
@@ -30,7 +30,7 @@ describe('useAuthentication', () => {
});
test('isAuthenticated reflects user presence', async () => {
- const noUserContext = { ...mockContextValue, user: null, isAuthenticated: false };
+ const noUserContext = { ...mockContextValue, user: undefined, isAuthenticated: false };
const wrapper = ({ children }: { children: React.ReactNode }) => (
{children}
@@ -39,7 +39,7 @@ describe('useAuthentication', () => {
const { result } = await renderHook(() => useAuthentication(), { wrapper });
expect(result.current.isAuthenticated).toBe(false);
- expect(result.current.user).toBeNull();
+ expect(result.current.user).toBeUndefined();
});
test('provides login and logout functions', async () => {
diff --git a/applications/web/sources/hooks/use-crawler-code-runner.test.tsx b/applications/web/sources/hooks/use-crawler-code-runner.test.tsx
index 3f0acab..18318d5 100644
--- a/applications/web/sources/hooks/use-crawler-code-runner.test.tsx
+++ b/applications/web/sources/hooks/use-crawler-code-runner.test.tsx
@@ -26,8 +26,8 @@ describe('useCrawlerCodeRunner', () => {
});
expect(result.current.status).toBe('idle');
- expect(result.current.result).toBeNull();
- expect(result.current.error).toBeNull();
+ expect(result.current.result).toBeUndefined();
+ expect(result.current.error).toBeUndefined();
});
test('logs info entries when starting execution', async () => {
@@ -196,13 +196,13 @@ describe('useCrawlerCodeRunner', () => {
result.current.runTest('https://example.com', 'return {}');
await vi.waitFor(() => {
- expect(result.current.result).not.toBeNull();
+ expect(result.current.result).not.toBeUndefined();
});
result.current.reset();
await vi.waitFor(() => {
- expect(result.current.result).toBeNull();
+ expect(result.current.result).toBeUndefined();
expect(result.current.status).toBe('idle');
});
});
diff --git a/applications/web/sources/hooks/use-crawler-manager.test.tsx b/applications/web/sources/hooks/use-crawler-manager.test.tsx
index 337425b..08eaa08 100644
--- a/applications/web/sources/hooks/use-crawler-manager.test.tsx
+++ b/applications/web/sources/hooks/use-crawler-manager.test.tsx
@@ -2,7 +2,7 @@ import { renderHook } from 'vitest-browser-react';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import { http, HttpResponse } from 'msw';
import { test, expect } from '../tests/extensions.ts';
-import { useCreateCrawler, useListCrawlers, useDeleteCrawler } from './use-crawler-manager.ts';
+import { useCreateCrawler, useListCrawlers, useDeleteCrawler, useGetCrawler, useUpdateCrawler } from './use-crawler-manager.ts';
import { worker } from '../tests/mocks/browser.ts';
import type { ReactNode } from 'react';
@@ -214,3 +214,232 @@ describe('useDeleteCrawler', () => {
expect(capturedAuthorization).toBe('Bearer test-access-token');
});
});
+
+describe('useGetCrawler', () => {
+ test('fetches a single crawler by id', async () => {
+ const mockCrawler = {
+ id: 'c1',
+ user_uuid: 'u1',
+ name: 'Detail Crawler',
+ type: 'web',
+ url_pattern: '^https://example\\.com',
+ code: '(body) => body.length',
+ input_schema: { body: 'string' },
+ output_schema: {},
+ created_at: '2024-01-01T00:00:00Z',
+ updated_at: '2024-01-02T00:00:00Z',
+ };
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ );
+
+ const { result } = await renderHook(() => useGetCrawler('c1'), {
+ wrapper: createWrapper(),
+ });
+
+ await vi.waitFor(() => {
+ expect(result.current.crawler).toBeDefined();
+ });
+
+ expect(result.current.crawler).toEqual(mockCrawler);
+ });
+
+ test('does not fetch when id is undefined', async () => {
+ let requestCount = 0;
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/:id`, async () => {
+ requestCount += 1;
+ return HttpResponse.json({});
+ }),
+ );
+
+ const { result } = await renderHook(() => useGetCrawler(undefined), {
+ wrapper: createWrapper(),
+ });
+
+ await vi.waitFor(() => {
+ expect(result.current.isLoading).toBe(false);
+ });
+ expect(result.current.crawler).toBeUndefined();
+ expect(requestCount).toBe(0);
+ });
+
+ test('throws on server error', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(
+ { error: 'not_found', error_description: 'Crawler not found' },
+ { status: 404 },
+ );
+ }),
+ );
+
+ const { result } = await renderHook(() => useGetCrawler('c1'), {
+ wrapper: createWrapper(),
+ });
+
+ await vi.waitFor(() => {
+ expect(result.current.error).toBeDefined();
+ });
+
+ expect(result.current.error?.message).toBe('Crawler not found');
+ });
+
+ test('includes Authorization header in GET request', async () => {
+ let capturedAuthorization: string | null = null;
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async ({ request }) => {
+ capturedAuthorization = request.headers.get('Authorization');
+ return HttpResponse.json({ id: 'c1' });
+ }),
+ );
+
+ const { result } = await renderHook(() => useGetCrawler('c1'), {
+ wrapper: createWrapper(),
+ });
+
+ await vi.waitFor(() => {
+ expect(result.current.crawler).toBeDefined();
+ });
+
+ expect(capturedAuthorization).toBe('Bearer test-access-token');
+ });
+});
+
+describe('useUpdateCrawler', () => {
+ test('updates a crawler and returns the new row', async () => {
+ const updated = {
+ id: 'c1',
+ user_uuid: 'u1',
+ name: 'Renamed',
+ type: 'web',
+ url_pattern: '^https://example\\.com',
+ code: '(body) => 1',
+ input_schema: { body: 'string' },
+ output_schema: {},
+ created_at: '2024-01-01T00:00:00Z',
+ updated_at: '2024-01-03T00:00:00Z',
+ };
+ worker.use(
+ http.put(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(updated);
+ }),
+ );
+
+ const { result } = await renderHook(() => useUpdateCrawler(), {
+ wrapper: createWrapper(),
+ });
+
+ const response = await result.current.updateCrawler({
+ id: 'c1',
+ name: 'Renamed',
+ type: 'web',
+ url_pattern: '^https://example\\.com',
+ code: '(body) => 1',
+ output_schema: {},
+ });
+
+ expect(response).toEqual(updated);
+ });
+
+ test('sends PUT body without id field', async () => {
+ let capturedBody: Record<string, unknown> | undefined;
+ worker.use(
+ http.put(`${MANAGER_URL}/crawlers/c1`, async ({ request }) => {
+ capturedBody = (await request.json()) as Record<string, unknown>;
+ return HttpResponse.json({
+ id: 'c1',
+ user_uuid: 'u1',
+ name: 'X',
+ type: 'web',
+ url_pattern: '',
+ code: '',
+ input_schema: {},
+ output_schema: {},
+ created_at: '',
+ updated_at: '',
+ });
+ }),
+ );
+
+ const { result } = await renderHook(() => useUpdateCrawler(), {
+ wrapper: createWrapper(),
+ });
+
+ await result.current.updateCrawler({
+ id: 'c1',
+ name: 'X',
+ type: 'web',
+ url_pattern: '',
+ code: '',
+ output_schema: {},
+ });
+
+ expect(capturedBody).toBeDefined();
+ expect(capturedBody && 'id' in capturedBody).toBe(false);
+ expect(capturedBody?.name).toBe('X');
+ });
+
+ test('throws on update error', async () => {
+ worker.use(
+ http.put(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(
+ { error: 'invalid_request', error_description: 'Name too long' },
+ { status: 400 },
+ );
+ }),
+ );
+
+ const { result } = await renderHook(() => useUpdateCrawler(), {
+ wrapper: createWrapper(),
+ });
+
+ await expect(
+ result.current.updateCrawler({
+ id: 'c1',
+ name: 'X',
+ type: 'web',
+ url_pattern: '',
+ code: '',
+ output_schema: {},
+ }),
+ ).rejects.toThrow('Name too long');
+ });
+
+ test('includes Authorization header in PUT request', async () => {
+ let capturedAuthorization: string | null = null;
+ worker.use(
+ http.put(`${MANAGER_URL}/crawlers/c1`, async ({ request }) => {
+ capturedAuthorization = request.headers.get('Authorization');
+ return HttpResponse.json({
+ id: 'c1',
+ user_uuid: 'u1',
+ name: 'X',
+ type: 'web',
+ url_pattern: '^.*$',
+ code: '(b)=>b',
+ input_schema: { body: 'string' },
+ output_schema: {},
+ created_at: '',
+ updated_at: '',
+ });
+ }),
+ );
+
+ const { result } = await renderHook(() => useUpdateCrawler(), {
+ wrapper: createWrapper(),
+ });
+
+ await result.current.updateCrawler({
+ id: 'c1',
+ name: 'X',
+ type: 'web',
+ url_pattern: '^.*$',
+ code: '(b)=>b',
+ });
+
+ expect(capturedAuthorization).toBe('Bearer test-access-token');
+ });
+});
diff --git a/applications/web/sources/hooks/use-crawler-manager.ts b/applications/web/sources/hooks/use-crawler-manager.ts
index c229384..bf6b921 100644
--- a/applications/web/sources/hooks/use-crawler-manager.ts
+++ b/applications/web/sources/hooks/use-crawler-manager.ts
@@ -1,4 +1,4 @@
-import { useMutation, useInfiniteQuery, useQueryClient } from '@tanstack/react-query';
+import { useMutation, useInfiniteQuery, useQuery, useQueryClient, skipToken } from '@tanstack/react-query';
import { loadAuthenticationData } from '@audio-underview/sign-provider';
import type { CrawlerRow } from '@audio-underview/supabase-connector';
@@ -22,6 +22,26 @@ interface CreateCrawlerInput {
code: string;
}
+interface UpdateCrawlerWebInput {
+ id: string;
+ type: 'web';
+ name: string;
+ url_pattern: string;
+ code: string;
+ output_schema?: Record<string, unknown>;
+}
+
+interface UpdateCrawlerDataInput {
+ id: string;
+ type: 'data';
+ name: string;
+ code: string;
+ input_schema: Record<string, unknown>;
+ output_schema?: Record<string, unknown>;
+}
+
+type UpdateCrawlerInput = UpdateCrawlerWebInput | UpdateCrawlerDataInput;
+
function getAccessToken(): string {
const authenticationData = loadAuthenticationData();
if (!authenticationData) {
@@ -101,6 +121,52 @@ async function listCrawlersRequest(parameters: ListCrawlersParameters): Promise<
return body as ListCrawlersResponse;
}
+async function getCrawlerRequest(id: string): Promise<CrawlerRow> {
+ const baseURL = getBaseURL();
+ const accessToken = getAccessToken();
+
+ const response = await fetch(`${baseURL}/crawlers/${id}`, {
+ method: 'GET',
+ headers: {
+ Authorization: `Bearer ${accessToken}`,
+ },
+ signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
+ });
+
+ const body = await parseResponseJSON(response);
+
+ if (!response.ok) {
+ throwResponseError(body, response.status);
+ }
+
+ return body as CrawlerRow;
+}
+
+async function updateCrawlerRequest(input: UpdateCrawlerInput): Promise<CrawlerRow> {
+ const baseURL = getBaseURL();
+ const accessToken = getAccessToken();
+
+ const { id, ...payload } = input;
+
+ const response = await fetch(`${baseURL}/crawlers/${id}`, {
+ method: 'PUT',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${accessToken}`,
+ },
+ body: JSON.stringify(payload),
+ signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
+ });
+
+ const body = await parseResponseJSON(response);
+
+ if (!response.ok) {
+ throwResponseError(body, response.status);
+ }
+
+ return body as CrawlerRow;
+}
+
async function deleteCrawlerRequest(id: string): Promise<void> {
const baseURL = getBaseURL();
const accessToken = getAccessToken();
@@ -121,6 +187,10 @@ async function deleteCrawlerRequest(id: string): Promise {
const CRAWLERS_QUERY_KEY = ['crawlers'] as const;
+function crawlerDetailKey(id: string) {
+ return ['crawlers', id] as const;
+}
+
export function useCreateCrawler() {
const queryClient = useQueryClient();
@@ -171,6 +241,44 @@ export function useListCrawlers() {
};
}
+const CRAWLER_DETAIL_DISABLED_KEY = ['crawlers', 'detail', 'disabled'] as const;
+
+export function useGetCrawler(id: string | undefined) {
+ const authenticationData = loadAuthenticationData();
+ const accessToken = authenticationData?.credential ?? undefined;
+
+ const query = useQuery({
+ queryKey: id ? crawlerDetailKey(id) : CRAWLER_DETAIL_DISABLED_KEY,
+ queryFn: accessToken && id ? () => getCrawlerRequest(id) : skipToken,
+ });
+
+ return {
+ crawler: query.data ?? undefined,
+ isLoading: query.isLoading,
+ error: query.error ?? undefined,
+ refetch: query.refetch,
+ };
+}
+
+export function useUpdateCrawler() {
+ const queryClient = useQueryClient();
+
+ const mutation = useMutation({
+ mutationFn: updateCrawlerRequest,
+ onSuccess: (data) => {
+ queryClient.invalidateQueries({ queryKey: CRAWLERS_QUERY_KEY, exact: true });
+ queryClient.setQueryData(crawlerDetailKey(data.id), data);
+ },
+ });
+
+ return {
+ updateCrawler: mutation.mutateAsync,
+ status: mutation.status,
+ error: mutation.error ?? undefined,
+ reset: mutation.reset,
+ };
+}
+
export function useDeleteCrawler() {
const queryClient = useQueryClient();
diff --git a/applications/web/sources/pages/CrawlerDetailPage.test.tsx b/applications/web/sources/pages/CrawlerDetailPage.test.tsx
new file mode 100644
index 0000000..0d874af
--- /dev/null
+++ b/applications/web/sources/pages/CrawlerDetailPage.test.tsx
@@ -0,0 +1,278 @@
+import { render } from 'vitest-browser-react';
+import { MemoryRouter, Route, Routes } from 'react-router';
+import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
+import { http, HttpResponse } from 'msw';
+import { test, expect } from '../tests/extensions.ts';
+import { CrawlerDetailPage } from './CrawlerDetailPage.tsx';
+import { ToastProvider } from '../contexts/ToastContext.tsx';
+import { AuthenticationContext } from '../contexts/authentication-context-value.ts';
+import type { AuthenticationContextValue } from '../contexts/authentication-context-value.ts';
+import { worker } from '../tests/mocks/browser.ts';
+import { page } from '@vitest/browser/context';
+import type { ReactNode } from 'react';
+
+const MANAGER_URL = 'http://localhost:8888';
+
+vi.mock('@audio-underview/sign-provider', async (importOriginal) => {
+ const actual = await importOriginal<typeof import('@audio-underview/sign-provider')>();
+ return {
+ ...actual,
+ loadAuthenticationData: vi.fn(() => ({
+ user: { id: 'u1', name: 'Test', email: 'test@example.com', provider: 'google' },
+ credential: 'test-access-token',
+ expiresAt: Date.now() + 3_600_000,
+ })),
+ };
+});
+
+function createAuth(): AuthenticationContextValue {
+ return {
+ user: { id: 'u1', name: 'Test', email: 'test@example.com', provider: 'google' },
+ isAuthenticated: true,
+ isLoading: false,
+ enabledProviders: [],
+ isGoogleConfigured: false,
+ isGitHubConfigured: false,
+ loginWithGoogle: vi.fn(),
+ loginWithGitHub: vi.fn(),
+ loginWithProvider: vi.fn().mockReturnValue({ success: true }),
+ logout: vi.fn(),
+ };
+}
+
+function renderPage(initialPath: string, children?: ReactNode) {
+ const queryClient = new QueryClient({
+ defaultOptions: {
+ queries: { retry: false },
+ mutations: { retry: false },
+ },
+ });
+
+  return render(
+    <QueryClientProvider client={queryClient}>
+      <AuthenticationContext.Provider value={createAuth()}>
+        <ToastProvider>
+          <MemoryRouter initialEntries={[initialPath]}>
+            <Routes>
+              <Route path="/crawlers/:id" element={<CrawlerDetailPage />} />
+              <Route path="/crawlers" element={<div>Crawlers List</div>} />
+            </Routes>
+            {children}
+          </MemoryRouter>
+        </ToastProvider>
+      </AuthenticationContext.Provider>
+    </QueryClientProvider>,
+  );
+}
+
+const mockCrawler = {
+ id: 'c1',
+ user_uuid: 'u1',
+ name: 'Detail Crawler',
+ type: 'web',
+ url_pattern: '^https://example\\.com',
+ code: '(body) => body.length',
+ input_schema: { body: 'string' },
+ output_schema: { count: 'number' },
+ created_at: '2024-01-01T00:00:00Z',
+ updated_at: '2024-01-02T00:00:00Z',
+};
+
+describe('CrawlerDetailPage', () => {
+ test('renders crawler details after loading', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ await expect.element(page.getByLabelText('Crawler name')).toHaveValue('Detail Crawler');
+ await expect.element(page.getByText('web', { exact: true })).toBeVisible();
+ await expect.element(page.getByLabelText('URL pattern')).toHaveValue('^https://example\\.com');
+ });
+
+ test('Save button is disabled until form is dirty', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ await expect.element(page.getByLabelText('Crawler name')).toHaveValue('Detail Crawler');
+ await expect.element(page.getByRole('button', { name: /Save/ })).toBeDisabled();
+ });
+
+ test('submits full body on save after name change', async () => {
+ let capturedBody: Record<string, unknown> | undefined;
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ http.put(`${MANAGER_URL}/crawlers/c1`, async ({ request }) => {
+ capturedBody = (await request.json()) as Record<string, unknown>;
+ return HttpResponse.json({ ...mockCrawler, name: 'Renamed' });
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ const nameInput = page.getByLabelText('Crawler name');
+ await expect.element(nameInput).toBeVisible();
+ await nameInput.fill('Renamed');
+
+ const saveButton = page.getByRole('button', { name: /Save/ });
+ await expect.element(saveButton).toBeEnabled();
+ await saveButton.click();
+
+ await vi.waitFor(() => {
+ expect(capturedBody).toBeDefined();
+ });
+
+ expect(capturedBody?.name).toBe('Renamed');
+ expect(capturedBody?.type).toBe('web');
+ expect(capturedBody?.url_pattern).toBe('^https://example\\.com');
+ expect(capturedBody).toMatchObject({ output_schema: { count: 'number' } });
+ expect(capturedBody && 'id' in capturedBody).toBe(false);
+ });
+
+ test('shows error state and retry when GET fails', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(
+ { error: 'not_found', error_description: 'Crawler not found' },
+ { status: 404 },
+ );
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ await expect.element(page.getByText('Failed to load crawler.')).toBeVisible();
+ await expect.element(page.getByRole('button', { name: /Retry/ })).toBeVisible();
+ });
+
+ test('renders data crawler without URL pattern field', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json({
+ ...mockCrawler,
+ type: 'data',
+ url_pattern: null,
+ input_schema: { userIds: 'string[]' },
+ });
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ await expect.element(page.getByText('data', { exact: true })).toBeVisible();
+ expect(page.getByLabelText('URL pattern').query()).toBeNull();
+ const inputSchema = page.getByLabelText('Input schema');
+ await expect.element(inputSchema).toBeVisible();
+ await expect.element(inputSchema).not.toHaveAttribute('readonly');
+ });
+
+ test('Save stays disabled when name is cleared', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ const nameInput = page.getByLabelText('Crawler name');
+ await expect.element(nameInput).toBeVisible();
+ await nameInput.fill('');
+
+ await expect.element(page.getByRole('button', { name: /Save/ })).toBeDisabled();
+ });
+
+ test('Revert restores pristine values and disables Save', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ const nameInput = page.getByLabelText('Crawler name');
+ await expect.element(nameInput).toHaveValue('Detail Crawler');
+ await nameInput.fill('Renamed Draft');
+ await expect.element(page.getByRole('button', { name: /Save/ })).toBeEnabled();
+
+ await page.getByRole('button', { name: /Revert/ }).click();
+
+ await expect.element(nameInput).toHaveValue('Detail Crawler');
+ await expect.element(page.getByRole('button', { name: /Save/ })).toBeDisabled();
+ await expect.element(page.getByRole('button', { name: /Revert/ })).toBeDisabled();
+ });
+
+ test('shows error toast when PUT fails', async () => {
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ http.put(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(
+ { error: 'server_error', error_description: 'Internal error' },
+ { status: 500 },
+ );
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ const nameInput = page.getByLabelText('Crawler name');
+ await expect.element(nameInput).toBeVisible();
+ await nameInput.fill('Renamed');
+
+ await page.getByRole('button', { name: /Save/ }).click();
+
+ await expect.element(page.getByText('Internal error')).toBeVisible();
+ await expect.element(page.getByRole('button', { name: /Save/ })).toBeEnabled();
+ await expect.element(nameInput).toHaveValue('Renamed');
+ });
+
+ test('shows inline error and disables save on invalid JSON schema', async () => {
+ let putCount = 0;
+ worker.use(
+ http.get(`${MANAGER_URL}/crawlers/c1`, async () => {
+ return HttpResponse.json(mockCrawler);
+ }),
+ http.put(`${MANAGER_URL}/crawlers/c1`, async () => {
+ putCount += 1;
+ return HttpResponse.json(mockCrawler);
+ }),
+ );
+
+ await renderPage('/crawlers/c1');
+
+ const outputSchema = page.getByLabelText('Output schema');
+ await expect.element(outputSchema).toBeVisible();
+ await outputSchema.fill('{ not json');
+
+ const saveButton = page.getByRole('button', { name: /Save/ });
+ await saveButton.click();
+
+ await expect.element(page.getByText('Must be a valid JSON object.')).toBeVisible();
+ await expect.element(saveButton).toBeDisabled();
+ expect(putCount).toBe(0);
+
+ await outputSchema.fill('{"count":"number"}');
+
+ // onChange clears the inline error immediately when value becomes valid — no blur required.
+ await expect.element(page.getByText('Must be a valid JSON object.')).not.toBeInTheDocument();
+ await expect.element(saveButton).toBeEnabled();
+ await saveButton.click();
+
+ await vi.waitFor(() => {
+ expect(putCount).toBe(1);
+ });
+ });
+});
diff --git a/applications/web/sources/pages/CrawlerDetailPage.tsx b/applications/web/sources/pages/CrawlerDetailPage.tsx
new file mode 100644
index 0000000..7a03020
--- /dev/null
+++ b/applications/web/sources/pages/CrawlerDetailPage.tsx
@@ -0,0 +1,617 @@
+import { useMemo, useState } from 'react';
+import styled from '@emotion/styled';
+import { keyframes } from '@emotion/react';
+import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+import { faSignOutAlt, faArrowLeft, faArrowsRotate, faFloppyDisk, faRotateLeft } from '@fortawesome/free-solid-svg-icons';
+import { useParams, useNavigate } from 'react-router';
+import type { CrawlerRow } from '@audio-underview/supabase-connector';
+import { useAuthentication } from '../hooks/use-authentication.ts';
+import { useGetCrawler, useUpdateCrawler } from '../hooks/use-crawler-manager.ts';
+import { useToast } from '../hooks/use-toast.ts';
+import { NavigationLinks } from '../components/NavigationLinks.tsx';
+import { Header, LogoutButton } from '../components/PageHeader.tsx';
+import { CodeEditorPanel } from '../components/crawlers/CodeEditorPanel.tsx';
+
+const fadeIn = keyframes`
+ from { opacity: 0; }
+ to { opacity: 1; }
+`;
+
+const spin = keyframes`
+ from { transform: rotate(0deg); }
+ to { transform: rotate(360deg); }
+`;
+
+const PageContainer = styled.div`
+ min-height: 100vh;
+ background: var(--bg-deep);
+`;
+
+const Main = styled.main`
+ padding: 2rem 1.5rem;
+ max-width: 960px;
+ margin: 0 auto;
+ animation: ${fadeIn} 0.4s ease-out;
+`;
+
+const BackButton = styled.button`
+ display: inline-flex;
+ align-items: center;
+ gap: 0.375rem;
+ padding: 0.375rem 0.75rem;
+ border-radius: 6px;
+ font-size: 0.8125rem;
+ font-weight: 500;
+ color: var(--text-secondary);
+ cursor: pointer;
+ transition: var(--transition-fast);
+ margin-bottom: 1.25rem;
+
+ &:hover {
+ color: var(--text-primary);
+ background: var(--bg-surface);
+ }
+`;
+
+const LoadingContainer = styled.div`
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ padding: 4rem 1.5rem;
+`;
+
+const Spinner = styled.div`
+ width: 32px;
+ height: 32px;
+ border: 3px solid var(--border-subtle);
+ border-top-color: var(--accent-primary);
+ border-radius: 50%;
+ animation: ${spin} 0.8s linear infinite;
+`;
+
+const ErrorState = styled.div`
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ justify-content: center;
+ padding: 4rem 1.5rem;
+ text-align: center;
+`;
+
+const ErrorMessage = styled.p`
+ font-size: 1rem;
+ color: var(--text-muted);
+ margin: 0 0 1.5rem 0;
+`;
+
+const RetryButton = styled.button`
+ display: inline-flex;
+ align-items: center;
+ gap: 0.5rem;
+ padding: 0.75rem 1.5rem;
+ border-radius: 8px;
+ font-size: 0.875rem;
+ font-weight: 600;
+ color: var(--text-primary);
+ background: var(--bg-surface);
+ border: 1px solid var(--border-subtle);
+ cursor: pointer;
+ transition: var(--transition-fast);
+
+ &:hover {
+ border-color: var(--border-focus);
+ }
+`;
+
+const Section = styled.section`
+ margin-bottom: 1.5rem;
+ padding: 1.25rem;
+ background: var(--bg-surface);
+ border: 1px solid var(--border-subtle);
+ border-radius: 10px;
+`;
+
+const SectionHeader = styled.div`
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ gap: 0.75rem;
+ margin-bottom: 1rem;
+`;
+
+const SectionTitle = styled.h2`
+ font-size: 0.8125rem;
+ font-weight: 600;
+ color: var(--text-secondary);
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+ margin: 0;
+`;
+
+const NameInput = styled.input`
+ font-size: 1.25rem;
+ font-weight: 700;
+ color: var(--text-primary);
+ background: var(--bg-deep);
+ border: 1px solid var(--border-subtle);
+ border-radius: 6px;
+ padding: 0.5rem 0.75rem;
+ outline: none;
+ width: 100%;
+ margin-bottom: 1rem;
+
+ &:focus {
+ border-color: var(--border-focus);
+ }
+`;
+
+const MetaGrid = styled.div`
+ display: grid;
+ grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
+ gap: 0.75rem;
+`;
+
+const MetaItem = styled.div`
+ display: flex;
+ flex-direction: column;
+ gap: 0.25rem;
+`;
+
+const MetaLabel = styled.span`
+ font-size: 0.6875rem;
+ font-weight: 600;
+ color: var(--text-muted);
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+`;
+
+const MetaValue = styled.span`
+ font-size: 0.875rem;
+ color: var(--text-secondary);
+ font-family: var(--font-mono);
+ word-break: break-all;
+`;
+
+const TextInput = styled.input`
+ font-size: 0.875rem;
+ color: var(--text-primary);
+ background: var(--bg-deep);
+ border: 1px solid var(--border-subtle);
+ border-radius: 6px;
+ padding: 0.375rem 0.625rem;
+ outline: none;
+ font-family: var(--font-mono);
+
+ &:focus {
+ border-color: var(--border-focus);
+ }
+`;
+
+const TypeBadge = styled.span`
+ display: inline-flex;
+ align-items: center;
+ width: fit-content;
+ padding: 0.125rem 0.5rem;
+ border-radius: 999px;
+ font-size: 0.6875rem;
+ font-weight: 600;
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+ color: var(--accent-primary);
+ background: rgba(99, 102, 241, 0.12);
+ border: 1px solid var(--border-subtle);
+`;
+
+const SchemaArea = styled('textarea', {
+ shouldForwardProp: (prop) => prop !== 'hasError',
+})<{ hasError?: boolean }>`
+ width: 100%;
+ min-height: 120px;
+ font-family: var(--font-mono);
+ font-size: 0.8125rem;
+ color: var(--text-primary);
+ background: var(--bg-deep);
+ border: 1px solid ${({ hasError }) => (hasError ? 'var(--color-error)' : 'var(--border-subtle)')};
+ border-radius: 6px;
+ padding: 0.5rem 0.625rem;
+ resize: vertical;
+ outline: none;
+
+ &:focus {
+ border-color: ${({ hasError }) => (hasError ? 'var(--color-error)' : 'var(--border-focus)')};
+ }
+
+ &:read-only {
+ color: var(--text-muted);
+ cursor: not-allowed;
+ }
+`;
+
+const SchemaHelper = styled('span', {
+ shouldForwardProp: (prop) => prop !== 'isError',
+})<{ isError?: boolean }>`
+ display: block;
+ margin-top: 0.375rem;
+ font-size: 0.75rem;
+ color: ${({ isError }) => (isError ? 'var(--color-error)' : 'var(--text-muted)')};
+`;
+
+const ActionRow = styled.div`
+ display: flex;
+ justify-content: flex-end;
+ gap: 0.5rem;
+ margin-top: 1.25rem;
+`;
+
+const SaveButton = styled.button`
+ display: inline-flex;
+ align-items: center;
+ gap: 0.5rem;
+ padding: 0.5rem 1rem;
+ border-radius: 8px;
+ font-size: 0.8125rem;
+ font-weight: 600;
+ color: var(--text-primary);
+ background: var(--accent-primary);
+ cursor: pointer;
+ transition: var(--transition-fast);
+
+ &:hover:not(:disabled) {
+ opacity: 0.9;
+ }
+
+ &:disabled {
+ cursor: not-allowed;
+ opacity: 0.5;
+ }
+`;
+
+const RevertButton = styled.button`
+ display: inline-flex;
+ align-items: center;
+ gap: 0.5rem;
+ padding: 0.5rem 1rem;
+ border-radius: 8px;
+ font-size: 0.8125rem;
+ font-weight: 500;
+ color: var(--text-secondary);
+ background: transparent;
+ border: 1px solid var(--border-subtle);
+ cursor: pointer;
+ transition: var(--transition-fast);
+
+ &:hover:not(:disabled) {
+ color: var(--text-primary);
+ background: var(--bg-deep);
+ }
+
+ &:disabled {
+ cursor: not-allowed;
+ opacity: 0.5;
+ }
+`;
+
+function formatDateTime(dateString: string) {
+ return new Date(dateString).toLocaleString(undefined, {
+ year: 'numeric',
+ month: 'short',
+ day: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ });
+}
+
+function stringifySchema(schema: Record<string, unknown>) {
+ return JSON.stringify(schema, null, 2);
+}
+
+interface FormState {
+ name: string;
+ url_pattern: string;
+ code: string;
+ input_schema: string;
+ output_schema: string;
+}
+
+function deriveFormState(crawler: CrawlerRow): FormState {
+ return {
+ name: crawler.name,
+ url_pattern: crawler.url_pattern ?? '',
+ code: crawler.code,
+ input_schema: stringifySchema(crawler.input_schema),
+ output_schema: stringifySchema(crawler.output_schema),
+ };
+}
+
+function tryParseSchema(raw: string): Record<string, unknown> | undefined {
+ try {
+ const parsed = JSON.parse(raw) as unknown;
+ if (typeof parsed !== 'object' || parsed === undefined || parsed === null || Array.isArray(parsed)) {
+ return undefined;
+ }
+ return parsed as Record;
+ } catch {
+ return undefined;
+ }
+}
+
// Per-field JSON validation messages; a key is absent when that field is valid.
interface SchemaErrors {
  input_schema?: string;
  output_schema?: string;
}
+
/**
 * Detail/edit page for a single crawler: loads the record by the `:id` route
 * param, lets the user edit name, URL pattern (web type only), code, and the
 * JSON schemas, validates the schemas as JSON objects, and saves through the
 * update mutation.
 *
 * NOTE(review): this chunk appears to have lost JSX tags and some generic
 * type arguments during extraction (e.g. the bare `useState(undefined)`
 * calls and the tag-less markup in the return) — verify against the original
 * file before relying on the exact render structure.
 */
export function CrawlerDetailPage() {
  const { id } = useParams<{ id: string }>();
  const navigate = useNavigate();
  // NOTE(review): `logout` is not referenced in the visible logic — it may be
  // used by the stripped JSX (e.g. a header component); confirm.
  const { logout } = useAuthentication();
  const { showToast } = useToast();
  const { crawler, isLoading, error, refetch } = useGetCrawler(id);
  const { updateCrawler, status: updateStatus } = useUpdateCrawler();

  // `form` holds the current edits, `pristine` the last-saved snapshot (used
  // for the dirty check and Revert), and `seededForCrawlerID` records which
  // crawler the form was last seeded from.
  const [form, setForm] = useState(undefined);
  const [pristine, setPristine] = useState(undefined);
  const [seededForCrawlerID, setSeededForCrawlerID] = useState(undefined);
  const [schemaErrors, setSchemaErrors] = useState({});

  // Seed the form synchronously during render when a (different) crawler
  // arrives — React's documented "adjust state when props change during
  // render" pattern; the setState calls schedule an immediate re-render
  // before children are committed.
  if (crawler && seededForCrawlerID !== crawler.id) {
    const initial = deriveFormState(crawler);
    setSeededForCrawlerID(crawler.id);
    setForm(initial);
    setPristine(initial);
    setSchemaErrors({});
  }

  // Field-by-field comparison against the pristine snapshot; false until both
  // snapshots exist (i.e. before the first seed).
  const isDirty = useMemo(() => {
    if (!form || !pristine) return false;
    return (
      form.name !== pristine.name ||
      form.url_pattern !== pristine.url_pattern ||
      form.code !== pristine.code ||
      form.input_schema !== pristine.input_schema ||
      form.output_schema !== pristine.output_schema
    );
  }, [form, pristine]);

  const hasSchemaError = !!schemaErrors.input_schema || !!schemaErrors.output_schema;
  const trimmedName = form?.name.trim() ?? '';
  // Save is enabled only for a dirty, schema-valid form with non-blank name
  // and code; a URL pattern is additionally required unless the crawler is
  // the 'data' type.
  const canSave =
    !!form &&
    isDirty &&
    !hasSchemaError &&
    trimmedName.length > 0 &&
    form.code.trim().length > 0 &&
    (crawler?.type === 'data' || form.url_pattern.trim().length > 0);

  const isSaving = updateStatus === 'pending';

  // Restore the last-saved snapshot and clear any stale validation errors.
  const handleRevert = () => {
    if (pristine) {
      setForm(pristine);
      setSchemaErrors({});
    }
  };

  // Blur-time validation: set or clear the field's error based on whether the
  // raw text parses to a JSON object.
  const validateSchemaField = (field: 'input_schema' | 'output_schema', raw: string) => {
    setSchemaErrors((previous) => {
      const next = { ...previous };
      const parsed = tryParseSchema(raw);
      if (parsed === undefined) {
        next[field] = 'Must be a valid JSON object.';
      } else {
        delete next[field];
      }
      return next;
    });
  };

  // Change handler: always store the raw text; additionally clear an existing
  // error as soon as the text becomes valid again (but never *add* an error
  // while typing — that only happens on blur/save).
  const handleSchemaChange = (field: 'input_schema' | 'output_schema', raw: string) => {
    setForm((previous) => (previous ? { ...previous, [field]: raw } : previous));
    setSchemaErrors((previous) => {
      if (!previous[field] || tryParseSchema(raw) === undefined) return previous;
      const next = { ...previous };
      delete next[field];
      return next;
    });
  };

  // Full validation + mutation. Re-validates everything (not just what blur
  // caught) so programmatic saves can't slip past stale state.
  const handleSave = async () => {
    if (!crawler || !form) return;

    const parsedInputSchema = tryParseSchema(form.input_schema);
    const parsedOutputSchema = tryParseSchema(form.output_schema);

    if (parsedInputSchema === undefined) {
      setSchemaErrors((previous) => ({ ...previous, input_schema: 'Must be a valid JSON object.' }));
      showToast('Error', 'Input schema is not valid JSON.', 'error');
      return;
    }
    if (parsedOutputSchema === undefined) {
      setSchemaErrors((previous) => ({ ...previous, output_schema: 'Must be a valid JSON object.' }));
      showToast('Error', 'Output schema is not valid JSON.', 'error');
      return;
    }

    if (!trimmedName) {
      showToast('Error', 'Name cannot be empty.', 'error');
      return;
    }

    if (!form.code.trim()) {
      showToast('Error', 'Code cannot be empty.', 'error');
      return;
    }

    const trimmedURLPattern = form.url_pattern.trim();
    if (crawler.type === 'web' && !trimmedURLPattern) {
      showToast('Error', 'URL pattern cannot be empty for web crawlers.', 'error');
      return;
    }

    try {
      // Data crawlers send an editable input schema; web crawlers send the
      // URL pattern instead (their input schema is fixed server-side).
      const payload =
        crawler.type === 'data'
          ? {
              id: crawler.id,
              type: 'data' as const,
              name: trimmedName,
              code: form.code,
              input_schema: parsedInputSchema,
              output_schema: parsedOutputSchema,
            }
          : {
              id: crawler.id,
              type: 'web' as const,
              name: trimmedName,
              url_pattern: trimmedURLPattern,
              code: form.code,
              output_schema: parsedOutputSchema,
            };

      // Reference-equality guard: only overwrite the form with the server's
      // canonical state if the user hasn't typed anything while the request
      // was in flight; the pristine snapshot always advances.
      const submittedForm = form;
      const updated = await updateCrawler(payload);
      const next = deriveFormState(updated);
      setForm((current) => (current === submittedForm ? next : current));
      setPristine(next);
      showToast('Saved', `Crawler "${trimmedName}" has been updated.`, 'success');
    } catch (saveError) {
      const message = saveError instanceof Error ? saveError.message : 'Failed to save crawler';
      showToast('Error', message, 'error');
    }
  };

  // Render: back link, then loading spinner / error-with-retry / the edit
  // form (name, type, URL pattern for web crawlers, timestamps, code editor,
  // both schema textareas, Revert + Save). JSX tags were lost in extraction;
  // the remaining attribute/expression lines are preserved verbatim below.
  return (
    
      
        
          navigate('/crawlers')}>
            
            Back to Crawlers
          
        
        {isLoading ? (
          
            
          
        ) : error ? (
          
            Failed to load crawler.
             refetch()}>
              
              Retry
            
          
        ) : crawler && form ? (
          <>
            
               setForm({ ...form, name: event.target.value })}
                aria-label="Crawler name"
                placeholder="Crawler name"
              />
            
            
              Type
              {crawler.type}
            
            {crawler.type === 'web' && (
              
                URL Pattern
                 setForm({ ...form, url_pattern: event.target.value })}
                  aria-label="URL pattern"
                  placeholder="^https://example\.com"
                />
              
            )}
            
              Created
              {formatDateTime(crawler.created_at)}
            
            
              Updated
              {formatDateTime(crawler.updated_at)}
            
            
            
            
            
            
              Code
              
               setForm({ ...form, code: value })}
                disabled={isSaving}
                showDefaultTemplate={false}
              />
            
            
            
            
              Input Schema
              
               handleSchemaChange('input_schema', event.target.value)}
                onBlur={
                  crawler.type === 'web'
                    ? undefined
                    : (event) => validateSchemaField('input_schema', event.target.value)
                }
                readOnly={crawler.type === 'web'}
                hasError={!!schemaErrors.input_schema}
                aria-label="Input schema"
                aria-invalid={!!schemaErrors.input_schema}
                spellCheck={false}
              />
              
                {schemaErrors.input_schema
                  ?? (crawler.type === 'web'
                    ? 'Web crawlers receive the fetched page body — schema is fixed and not editable.'
                    : 'Enter a JSON object describing the input this crawler expects.')}
              
            
            
            
              Output Schema
              
               handleSchemaChange('output_schema', event.target.value)}
                onBlur={(event) => validateSchemaField('output_schema', event.target.value)}
                hasError={!!schemaErrors.output_schema}
                aria-label="Output schema"
                aria-invalid={!!schemaErrors.output_schema}
                spellCheck={false}
              />
              {schemaErrors.output_schema && (
                {schemaErrors.output_schema}
              )}
            
            
            
              
                Revert
              
              
                
                {isSaving ? 'Saving...' : 'Save'}
              
            
          >
        ) : null}
      
    
  );
}
diff --git a/applications/web/sources/pages/CrawlersPage.tsx b/applications/web/sources/pages/CrawlersPage.tsx
index 522ccca..1d6cbcc 100644
--- a/applications/web/sources/pages/CrawlersPage.tsx
+++ b/applications/web/sources/pages/CrawlersPage.tsx
@@ -85,10 +85,16 @@ const CrawlerCard = styled.div`
border: 1px solid var(--border-subtle);
border-radius: 10px;
transition: var(--transition-fast);
+ cursor: pointer;
&:hover {
border-color: var(--border-focus);
}
+
+ &:focus-visible {
+ outline: 2px solid var(--border-focus);
+ outline-offset: 2px;
+ }
`;
const CrawlerInfo = styled.div`
@@ -312,7 +318,8 @@ export function CrawlersPage() {
const { deleteCrawler, status: deleteStatus } = useDeleteCrawler();
const [confirmTarget, setConfirmTarget] = useState<{ id: string; name: string }>();
- const handleDelete = (id: string, name: string) => {
+ const handleDelete = (event: React.MouseEvent, id: string, name: string) => {
+ event.stopPropagation();
setConfirmTarget({ id, name });
};
@@ -376,16 +383,29 @@ export function CrawlersPage() {
{crawlers.map((crawler) => (
-
+ navigate(`/crawlers/${crawler.id}`)}
+ onKeyDown={(event) => {
+ if (event.target !== event.currentTarget) return;
+ if (event.key === 'Enter' || event.key === ' ') {
+ if (event.key === ' ') event.preventDefault();
+ navigate(`/crawlers/${crawler.id}`);
+ }
+ }}
+ >
{crawler.name}
{crawler.url_pattern}
Created {formatDate(crawler.created_at)}
handleDelete(crawler.id, crawler.name)}
+ onClick={(event) => handleDelete(event, crawler.id, crawler.name)}
disabled={deleteStatus === 'pending'}
title="Delete crawler"
+ aria-label={`Delete crawler ${crawler.name}`}
>
diff --git a/applications/web/sources/tests/extensions.ts b/applications/web/sources/tests/extensions.ts
index 04b01d6..85f0d1b 100644
--- a/applications/web/sources/tests/extensions.ts
+++ b/applications/web/sources/tests/extensions.ts
@@ -3,7 +3,8 @@ import { worker } from './mocks/browser.ts';
export const test = testBase.extend({
worker: [
- async (_fixtures: Record, use: (value: typeof worker) => Promise) => {
+ // eslint-disable-next-line no-empty-pattern -- vitest v4 requires destructuring for fixture params
+ async ({}: Record, use: (value: typeof worker) => Promise) => {
await worker.start({ quiet: true });
await use(worker);
worker.resetHandlers();
diff --git a/functions/crawler-code-runner-function/tests/index.test.ts b/functions/crawler-code-runner-function/tests/index.test.ts
index fae66d5..43b4de4 100644
--- a/functions/crawler-code-runner-function/tests/index.test.ts
+++ b/functions/crawler-code-runner-function/tests/index.test.ts
@@ -455,7 +455,7 @@ describe('crawler-code-runner-function', () => {
expect(body.result).toBe('HELLO');
});
- it('normalizes undefined result to null', async () => {
+ it('omits result field when code returns undefined', async () => {
vi.stubGlobal('fetch', vi.fn().mockResolvedValue({
status: 200,
text: () => Promise.resolve('hello'),
@@ -477,7 +477,7 @@ describe('crawler-code-runner-function', () => {
expect(response.statusCode).toBe(200);
const body = JSON.parse(response.body);
- expect(body.result).toBeNull();
+ expect(body.result).toBeUndefined();
});
});
@@ -590,7 +590,7 @@ describe('crawler-code-runner-function', () => {
expect(body.result).toBe(true);
});
- it('normalizes undefined result to null for data type', async () => {
+ it('omits result field when code returns undefined for data type', async () => {
const event = createEvent({
method: 'POST',
path: '/run',
@@ -607,7 +607,7 @@ describe('crawler-code-runner-function', () => {
expect(response.statusCode).toBe(200);
const body = JSON.parse(response.body);
- expect(body.result).toBeNull();
+ expect(body.result).toBeUndefined();
});
it('does not perform SSRF check for data type', async () => {