feat: implement unified structured logging system with OpenTelemetry integration and correlation ID support (#197)
AGENTS.md · 38 lines changed

@@ -9,6 +9,7 @@
- `lib/inngest/` Inngest client and functions for event-driven background section revalidation.
- `lib/db/` Drizzle ORM schema, migrations, and repository layer for Postgres persistence.
- `lib/db/repos/` repository layer for each table (domains, certificates, dns, favicons, headers, hosting, providers, registrations, screenshots, seo).
- `lib/logger/` unified structured logging system with OpenTelemetry integration, correlation IDs, and PII-safe field filtering.
- `server/` backend integrations and tRPC routers; isolate DNS, RDAP/WHOIS, TLS, and header probing services.
- `server/routers/` tRPC router definitions (`_app.ts` and domain-specific routers).
- `server/services/` service layer for domain data fetching (DNS, certificates, headers, hosting, registration, SEO, screenshot, favicon, etc.).
@@ -49,6 +50,7 @@
- Uses `threads` pool for compatibility with sandboxed environments (e.g., Cursor agent commands).
- Global setup in `vitest.setup.ts`:
  - Mocks analytics clients/servers (`@/lib/analytics/server` and `@/lib/analytics/client`).
  - Mocks logger clients/servers (`@/lib/logger/server` and `@/lib/logger/client`).
  - Mocks `server-only` module.
- Database in tests: Drizzle client is not globally mocked. Replace `@/lib/db/client` with a PGlite-backed instance when needed (`@/lib/db/pglite`).
- UI tests:
@@ -91,3 +93,39 @@
- Leverages Next.js 16 `after()` for background event capture with graceful degradation.
- Distinct ID sourced from PostHog cookie via `cache()`-wrapped `getDistinctId()` to comply with Next.js restrictions.
- Analytics mocked in tests via `vitest.setup.ts`.

## Structured Logging

- Unified logging system in `lib/logger/` with server (`lib/logger/server.ts`) and client (`lib/logger/client.ts`) implementations.
- **Server-side logging:**
  - Import the singleton: `import { logger } from "@/lib/logger/server"`
  - Or create a service logger: `const logger = createLogger({ source: "dns" })`
  - Automatic OpenTelemetry trace/span ID injection from `@vercel/otel`
  - Correlation ID tracking via AsyncLocalStorage for request tracing
  - Critical errors automatically tracked in PostHog via `after()`
  - Log levels: `trace`, `debug`, `info`, `warn`, `error`, `fatal`
- **Client-side logging:**
  - Import the singleton: `import { logger } from "@/lib/logger/client"`
  - Or use the hook: `const logger = useLogger({ component: "MyComponent" })`
  - Errors automatically tracked in PostHog
  - Console output only in development (info/debug) and always for errors
  - Correlation IDs propagated from the server via header/cookie/localStorage
- **Log format:** Structured JSON with consistent fields (level, message, timestamp, context, correlationId, traceId, spanId, environment). A sample entry follows this list.
- **Usage examples:**
  ```typescript
  // Server (service layer)
  import { createLogger } from "@/lib/logger/server";
  const logger = createLogger({ source: "dns" });
  logger.debug("start example.com", { domain: "example.com" });
  logger.info("ok example.com", { domain: "example.com", count: 5 });
  logger.error("failed to resolve", error, { domain: "example.com" });

  // Client (components)
  import { useLogger } from "@/hooks/use-logger";
  const logger = useLogger({ component: "DomainSearch" });
  logger.info("search initiated", { domain: query });
  logger.error("search failed", error, { domain: query });
  ```
- **Correlation IDs:** Generated server-side, propagated to the client via the `x-correlation-id` header, and stored in a cookie/localStorage. Enables request tracing across services.
- **Integration with tRPC:** Middleware in `trpc/init.ts` automatically logs all procedures with correlation IDs and OpenTelemetry context (see the middleware sketch after this list).
- **Testing:** Logger mocked in `vitest.setup.ts`. Use `vi.mocked(logger.info)` to assert log calls in tests (see the test sketch after this list).
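
A single emitted entry looks roughly like the following (illustrative values only; the field set mirrors `createLogEntry` in `lib/logger/index.ts`):

```json
{
  "level": "error",
  "message": "failed to resolve",
  "timestamp": "2025-01-01T12:00:00.000Z",
  "context": { "source": "dns", "domain": "example.com" },
  "error": { "name": "Error", "message": "ECONNREFUSED", "stack": "..." },
  "correlationId": "00000000-0000-4000-8000-000000000000",
  "traceId": "4bf92f3577b34da6a3ce929d0e0e4736",
  "spanId": "00f067aa0ba902b7",
  "environment": "production"
}
```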
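
The tRPC integration mentioned above lives in `trpc/init.ts`; the snippet below is only a sketch of the pattern, not the repository's actual middleware. The `t` instance, the context shape, and the logged fields are assumptions here; `logger` and `withCorrelationId` are the real exports from `@/lib/logger/server` shown later in this diff.

```typescript
import { initTRPC } from "@trpc/server";
import { logger, withCorrelationId } from "@/lib/logger/server";

// Assumed context shape; the real one comes from createContext in trpc/init.ts.
type Context = { correlationId?: string };

const t = initTRPC.context<Context>().create();

// Logs every procedure call and runs it inside the correlation-ID async
// context so downstream logger calls pick up the same ID automatically.
export const loggedProcedure = t.procedure.use(
  async ({ ctx, path, type, next }) => {
    const start = Date.now();
    const result = await withCorrelationId(ctx.correlationId ?? "unknown", () =>
      next(),
    );
    logger.info(`[trpc] ${type} ${path}`, {
      path,
      type,
      durationMs: Date.now() - start,
      ok: result.ok,
    });
    return result;
  },
);
```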
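
And a minimal sketch of the testing note, assuming the `vitest.setup.ts` mock replaces the logger methods with `vi.fn()` spies; the direct `logger.error` call stands in for whatever code a real test would exercise.

```typescript
import { describe, expect, it, vi } from "vitest";
// Replaced with a mock by the global setup in vitest.setup.ts.
import { logger } from "@/lib/logger/server";

describe("logger mock (illustrative)", () => {
  it("lets tests assert structured log calls", () => {
    // In a real test this call happens inside the code under test.
    logger.error("failed to resolve", new Error("boom"), {
      domain: "example.com",
    });

    expect(vi.mocked(logger.error)).toHaveBeenCalledWith(
      "failed to resolve",
      expect.any(Error),
      expect.objectContaining({ domain: "example.com" }),
    );
  });
});
```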
@@ -1,16 +1,34 @@
 import { fetchRequestHandler } from "@trpc/server/adapters/fetch";
+import { CORRELATION_ID_HEADER } from "@/lib/logger/correlation";
 import { appRouter } from "@/server/routers/_app";
 import { createContext } from "@/trpc/init";
 
-const handler = (req: Request) =>
-  fetchRequestHandler({
+const handler = async (req: Request) => {
+  // Extract correlation ID from context to add to response headers
+  const ctx = await createContext({ req });
+
+  return fetchRequestHandler({
     endpoint: "/api/trpc",
     req,
     router: appRouter,
-    createContext: () => createContext({ req }),
-    onError: ({ path, error }) => {
-      console.error(`[trpc] unhandled error ${path}`, error);
+    createContext: () => ctx,
+    onError: async ({ path, error }) => {
+      // Use logger for unhandled errors
+      const { logger } = await import("@/lib/logger/server");
+      logger.error(`[trpc] unhandled error ${path}`, error, { path });
     },
+    responseMeta: () => {
+      // Add correlation ID to response headers for client tracking
+      if (ctx.correlationId) {
+        return {
+          headers: {
+            [CORRELATION_ID_HEADER]: ctx.correlationId,
+          },
+        };
+      }
+      return {};
+    },
   });
+};
 
 export { handler as GET, handler as POST };

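For the client half of this round trip, a caller would read the header that `responseMeta()` sets above and persist it. A minimal sketch, assuming a plain `fetch` wrapper rather than the repository's actual tRPC link wiring; `CORRELATION_ID_HEADER` and `setCorrelationIdInStorage` are the real exports from `lib/logger/correlation.ts`:

```typescript
import {
  CORRELATION_ID_HEADER,
  setCorrelationIdInStorage,
} from "@/lib/logger/correlation";

// Hypothetical helper: forwards the request, then stores the correlation ID
// returned by the tRPC route handler so subsequent client logs carry it.
export async function fetchWithCorrelation(
  input: RequestInfo | URL,
  init?: RequestInit,
): Promise<Response> {
  const res = await fetch(input, init);
  const correlationId = res.headers.get(CORRELATION_ID_HEADER);
  if (correlationId) {
    setCorrelationIdInStorage(correlationId);
  }
  return res;
}
```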
@@ -1,10 +1,10 @@
 "use client";
 
 import { RefreshCcw } from "lucide-react";
-import posthog from "posthog-js";
 import { useEffect } from "react";
 import { CreateIssueButton } from "@/components/create-issue-button";
 import { Button } from "@/components/ui/button";
+import { logger } from "@/lib/logger/client";
 
 export default function RootError(props: {
   error: Error & { digest?: string };
@@ -13,7 +13,9 @@ export default function RootError(props: {
   const { error, reset } = props;
 
   useEffect(() => {
-    posthog.captureException(error);
+    logger.error("Root error boundary caught error", error, {
+      digest: error.digest,
+    });
   }, [error]);
 
   const isDev = process.env.NODE_ENV !== "production";

@@ -1,8 +1,8 @@
 "use client";
 
 import NextError from "next/error";
-import posthog from "posthog-js";
 import { useEffect } from "react";
+import { logger } from "@/lib/logger/client";
 
 export default function GlobalError({
   error,
@@ -12,7 +12,7 @@ export default function GlobalError({
   reset: () => void;
 }) {
   useEffect(() => {
-    posthog.captureException(error);
+    logger.error("Global error boundary caught error", error);
   }, [error]);
 
   return (

@@ -1,7 +1,4 @@
-/**
- * @vitest-environment jsdom
- */
-
+/* @vitest-environment jsdom */
 import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
 import { render, screen } from "@testing-library/react";
 import { describe, expect, it, vi } from "vitest";

@@ -6,6 +6,7 @@ import type { ReactNode } from "react";
|
||||
import { Component } from "react";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { createLogger } from "@/lib/logger/client";
|
||||
|
||||
interface Props {
|
||||
children: ReactNode;
|
||||
@@ -22,6 +23,8 @@ interface State {
|
||||
* Catches rendering errors and provides a fallback UI without crashing the entire page.
|
||||
*/
|
||||
export class SectionErrorBoundary extends Component<Props, State> {
|
||||
private logger = createLogger({ component: "SectionErrorBoundary" });
|
||||
|
||||
constructor(props: Props) {
|
||||
super(props);
|
||||
this.state = { hasError: false, error: null };
|
||||
@@ -38,14 +41,10 @@ export class SectionErrorBoundary extends Component<Props, State> {
|
||||
componentStack: errorInfo.componentStack,
|
||||
});
|
||||
|
||||
// Also log to console in development
|
||||
if (process.env.NODE_ENV === "development") {
|
||||
console.error(
|
||||
`[SectionErrorBoundary] Error in ${this.props.sectionName}:`,
|
||||
error,
|
||||
errorInfo,
|
||||
);
|
||||
}
|
||||
this.logger.error("render error", error, {
|
||||
section: this.props.sectionName,
|
||||
componentStack: errorInfo.componentStack,
|
||||
});
|
||||
}
|
||||
|
||||
render() {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { notifyManager, useQueryClient } from "@tanstack/react-query";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { useLogger } from "@/hooks/use-logger";
|
||||
import { analytics } from "@/lib/analytics/client";
|
||||
import { exportDomainData } from "@/lib/json-export";
|
||||
|
||||
@@ -19,6 +20,7 @@ type QueryKeys = {
|
||||
*/
|
||||
export function useDomainExport(domain: string, queryKeys: QueryKeys) {
|
||||
const queryClient = useQueryClient();
|
||||
const logger = useLogger({ component: "DomainExport" });
|
||||
const [allDataLoaded, setAllDataLoaded] = useState(false);
|
||||
const queryKeysRef = useRef(queryKeys);
|
||||
|
||||
@@ -81,26 +83,19 @@ export function useDomainExport(domain: string, queryKeys: QueryKeys) {
|
||||
|
||||
// Export with partial data (graceful degradation)
|
||||
exportDomainData(domain, exportData);
|
||||
} catch (error) {
|
||||
console.error("[export] failed to export domain data", error);
|
||||
|
||||
analytics.trackException(
|
||||
error instanceof Error ? error : new Error(String(error)),
|
||||
{
|
||||
domain,
|
||||
},
|
||||
);
|
||||
} catch (err) {
|
||||
logger.error("failed to export domain data", err, { domain });
|
||||
|
||||
// Show error toast
|
||||
toast.error(`Failed to export ${domain}`, {
|
||||
description:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
err instanceof Error
|
||||
? err.message
|
||||
: "An error occurred while exporting",
|
||||
position: "bottom-center",
|
||||
});
|
||||
}
|
||||
}, [domain, queryClient, queryKeys]);
|
||||
}, [domain, queryClient, queryKeys, logger]);
|
||||
|
||||
return { handleExport, allDataLoaded };
|
||||
}
|
||||
|
||||
hooks/use-logger.ts · new file · 50 lines
@@ -0,0 +1,50 @@
"use client";

import { useMemo } from "react";
import { logger as clientLogger, createLogger } from "@/lib/logger/client";
import type { LogContext, Logger } from "@/lib/logger/index";

/**
 * React hook for component-level logging.
 *
 * Creates a memoized logger instance with component-specific context.
 * The logger automatically includes the correlation ID and any provided context.
 *
 * @param baseContext - Optional context to be included with all logs from this logger
 * @returns Logger instance
 *
 * @example
 * ```tsx
 * function DomainSearch() {
 *   const logger = useLogger({ component: "DomainSearch" });
 *
 *   const handleSearch = (query: string) => {
 *     logger.info("search_initiated", { query });
 *     // ... search logic
 *   };
 *
 *   return <input onChange={(e) => handleSearch(e.target.value)} />;
 * }
 * ```
 */
export function useLogger(baseContext?: LogContext): Logger {
  // Generate a stable key for the context to prevent logger recreation on every render
  // when using inline object literals (e.g. useLogger({ component: "..." })).
  // We use JSON.stringify as it handles the most common case of simple value objects.
  let contextKey: string | LogContext | undefined;
  try {
    contextKey = baseContext ? JSON.stringify(baseContext) : undefined;
  } catch {
    // Fallback to object reference if serialization fails (e.g. circular refs)
    contextKey = baseContext;
  }

  // biome-ignore lint/correctness/useExhaustiveDependencies: We use contextKey to control memoization, but we need baseContext for creation. Since equal keys imply equal content (for serializable objects), using the captured baseContext from the first render that produced this key is safe.
  return useMemo(() => {
    if (baseContext) {
      return createLogger(baseContext);
    }
    // Return singleton logger if no context provided
    return clientLogger;
  }, [contextKey]);
}

@@ -12,20 +12,18 @@ export const onRequestError: Instrumentation.onRequestError = async (
|
||||
// Only track errors in Node.js runtime (not Edge)
|
||||
if (process.env.NEXT_RUNTIME === "nodejs") {
|
||||
try {
|
||||
// Dynamic imports for Node.js-only code
|
||||
const { analytics } = await import("@/lib/analytics/server");
|
||||
|
||||
// Note: we let analytics.trackException handle distinctId extraction from cookies
|
||||
analytics.trackException(
|
||||
// Use logger for structured error logging
|
||||
const { logger } = await import("@/lib/logger/server");
|
||||
logger.error(
|
||||
"[instrumentation] request error",
|
||||
error instanceof Error ? error : new Error(String(error)),
|
||||
{
|
||||
path: request.path,
|
||||
method: request.method,
|
||||
},
|
||||
);
|
||||
} catch (trackingError) {
|
||||
} catch {
|
||||
// Graceful degradation - don't throw to avoid breaking the request
|
||||
console.error("[instrumentation] error tracking failed:", trackingError);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -11,6 +11,9 @@ function track(event: string, properties?: Record<string, unknown>) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal Use logger.error() instead, which automatically tracks exceptions.
|
||||
*/
|
||||
function trackException(error: Error, properties?: Record<string, unknown>) {
|
||||
try {
|
||||
posthog.captureException(error, properties);
|
||||
|
||||
@@ -102,6 +102,9 @@ export const analytics = {
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* @internal Use logger.error() instead, which automatically tracks exceptions.
|
||||
*/
|
||||
trackException: (
|
||||
error: Error,
|
||||
properties: Record<string, unknown>,
|
||||
|
||||
lib/blob.ts · 10 lines changed
@@ -1,6 +1,9 @@
|
||||
import "server-only";
|
||||
|
||||
import { del, put } from "@vercel/blob";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "blob" });
|
||||
|
||||
/**
|
||||
* Upload a buffer to Vercel Blob storage
|
||||
@@ -44,9 +47,12 @@ export async function deleteBlobs(urls: string[]): Promise<DeleteResult> {
|
||||
results.push({ url, deleted: true });
|
||||
} catch (err) {
|
||||
const message = (err as Error)?.message || "unknown";
|
||||
console.error(
|
||||
`[blob] delete failed ${url}`,
|
||||
logger.error(
|
||||
"delete failed",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
{
|
||||
url,
|
||||
},
|
||||
);
|
||||
results.push({ url, deleted: false, error: message });
|
||||
}
|
||||
|
||||
@@ -4,6 +4,9 @@ import * as ipaddr from "ipaddr.js";
|
||||
import { cacheLife, cacheTag } from "next/cache";
|
||||
import { CLOUDFLARE_IPS_URL } from "@/lib/constants/external-apis";
|
||||
import { ipV4InCidr, ipV6InCidr } from "@/lib/ip";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "cloudflare-ips" });
|
||||
|
||||
export interface CloudflareIpRanges {
|
||||
ipv4Cidrs: string[];
|
||||
@@ -87,13 +90,10 @@ async function getCloudflareIpRanges(): Promise<CloudflareIpRanges> {
|
||||
try {
|
||||
const ranges = await fetchCloudflareIpRanges();
|
||||
parseAndCacheRanges(ranges);
|
||||
console.info("[cloudflare-ips] IP ranges fetched");
|
||||
logger.info("IP ranges fetched");
|
||||
return ranges;
|
||||
} catch (err) {
|
||||
console.error(
|
||||
"[cloudflare-ips] fetch error",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("fetch error", err);
|
||||
// Return empty ranges on error
|
||||
return { ipv4Cidrs: [], ipv6Cidrs: [] };
|
||||
}
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import { PGlite } from "@electric-sql/pglite";
|
||||
import { drizzle } from "drizzle-orm/pglite";
|
||||
import * as schema from "@/lib/db/schema";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
// Dynamic import via require pattern is recommended in community examples
|
||||
// to access drizzle-kit/api in Vitest.
|
||||
const { pushSchema } =
|
||||
require("drizzle-kit/api") as typeof import("drizzle-kit/api");
|
||||
|
||||
const logger = createLogger({ source: "pglite" });
|
||||
|
||||
type DbBundle = { db: ReturnType<typeof drizzle>; client: PGlite };
|
||||
let cached: DbBundle | null = null;
|
||||
let schemaApplied = false;
|
||||
@@ -27,16 +30,17 @@ export async function makePGliteDb(): Promise<DbBundle> {
|
||||
cached.db as any,
|
||||
);
|
||||
// Silence noisy logs printed by drizzle-kit during schema sync in tests
|
||||
const origLog = console.log;
|
||||
const consoleObj = globalThis.console;
|
||||
const origLog = consoleObj.log;
|
||||
try {
|
||||
console.log = (...args: unknown[]) => {
|
||||
consoleObj.log = (...args: unknown[]) => {
|
||||
const s = String(args[0] ?? "");
|
||||
if (s.includes("Pulling schema from database")) return;
|
||||
origLog(...args);
|
||||
origLog.apply(consoleObj, args as unknown[]);
|
||||
};
|
||||
await apply();
|
||||
} finally {
|
||||
console.log = origLog;
|
||||
consoleObj.log = origLog;
|
||||
}
|
||||
schemaApplied = true;
|
||||
}
|
||||
@@ -52,9 +56,9 @@ export async function closePGliteDb(): Promise<void> {
|
||||
if (!cached) return;
|
||||
try {
|
||||
await cached.client.close();
|
||||
} catch (error) {
|
||||
} catch (err) {
|
||||
// Swallow errors on close (client may already be closed)
|
||||
console.warn("PGlite close warning:", error);
|
||||
logger.error("close warning", err);
|
||||
} finally {
|
||||
cached = null;
|
||||
schemaApplied = false;
|
||||
|
||||
@@ -4,6 +4,9 @@ import { and, eq, isNull, lt, or } from "drizzle-orm";
|
||||
import { getDomainTld } from "rdapper";
|
||||
import { db } from "@/lib/db/client";
|
||||
import { domains } from "@/lib/db/schema";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "domains" });
|
||||
|
||||
/**
|
||||
* Debounce interval for updating domain lastAccessedAt timestamp.
|
||||
@@ -108,9 +111,6 @@ export async function updateLastAccessed(name: string): Promise<void> {
|
||||
),
|
||||
);
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
`[access] failed to update lastAccessedAt for ${name}`,
|
||||
err instanceof Error ? err.message : String(err),
|
||||
);
|
||||
logger.error("failed to update lastAccessedAt", err, { domain: name });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,11 @@ import "server-only";
|
||||
import { and, desc, eq, or, sql } from "drizzle-orm";
|
||||
import { db } from "@/lib/db/client";
|
||||
import { type providerCategory, providers } from "@/lib/db/schema";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { slugify } from "@/lib/slugify";
|
||||
|
||||
const logger = createLogger({ source: "providers" });
|
||||
|
||||
export type ResolveProviderInput = {
|
||||
category: (typeof providerCategory.enumValues)[number];
|
||||
domain?: string | null;
|
||||
@@ -322,7 +325,10 @@ export async function batchResolveOrCreateProviderIds(
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn("[providers] batch insert partial failure", err);
|
||||
logger.error("batch insert partial failure", err, {
|
||||
count: toCreate.length,
|
||||
});
|
||||
|
||||
// Fall back to individual resolution for failed items
|
||||
for (const input of toCreate) {
|
||||
const domainKey = makeProviderKey(
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import { get } from "@vercel/edge-config";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "edge-config" });
|
||||
|
||||
/**
|
||||
* Fetches the default domain suggestions from Vercel Edge Config.
|
||||
@@ -34,12 +37,9 @@ export async function getDefaultSuggestions(): Promise<string[]> {
|
||||
|
||||
// Return the suggestions if they exist, otherwise empty array
|
||||
return suggestions ?? [];
|
||||
} catch (error) {
|
||||
} catch (err) {
|
||||
// Log the error but fail gracefully
|
||||
console.error(
|
||||
"[edge-config] failed to fetch domain suggestions",
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
logger.error("failed to fetch domain suggestions", err);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import { lookup as dnsLookup } from "node:dns/promises";
|
||||
import { isIP } from "node:net";
|
||||
import * as ipaddr from "ipaddr.js";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "remote-asset" });
|
||||
|
||||
// Hosts that should never be fetched regardless of DNS (fast path).
|
||||
const BLOCKED_HOSTNAMES = new Set(["localhost"]);
|
||||
@@ -112,7 +115,7 @@ export async function fetchRemoteAsset(
|
||||
`Remote asset request failed with ${response.status}`,
|
||||
response.status,
|
||||
);
|
||||
console.warn("[remote-asset] response error", {
|
||||
logger.warn("response error", {
|
||||
url: currentUrl.toString(),
|
||||
reason: error.message,
|
||||
});
|
||||
@@ -127,7 +130,7 @@ export async function fetchRemoteAsset(
|
||||
"size_exceeded",
|
||||
`Remote asset declared size ${declared} exceeds limit ${maxBytes}`,
|
||||
);
|
||||
console.warn("[remote-asset] size exceeded", {
|
||||
logger.warn("size exceeded", {
|
||||
url: currentUrl.toString(),
|
||||
reason: error.message,
|
||||
});
|
||||
@@ -183,7 +186,7 @@ async function ensureUrlAllowed(
|
||||
BLOCKED_HOSTNAMES.has(hostname) ||
|
||||
BLOCKED_SUFFIXES.some((suffix) => hostname.endsWith(suffix))
|
||||
) {
|
||||
console.warn("[remote-asset] blocked host", {
|
||||
logger.warn("blocked host", {
|
||||
url: url.toString(),
|
||||
reason: "host_blocked",
|
||||
});
|
||||
@@ -194,7 +197,7 @@ async function ensureUrlAllowed(
|
||||
options.allowedHosts.length > 0 &&
|
||||
!options.allowedHosts.includes(hostname)
|
||||
) {
|
||||
console.warn("[remote-asset] blocked host", {
|
||||
logger.warn("blocked host", {
|
||||
url: url.toString(),
|
||||
reason: "host_not_allowed",
|
||||
});
|
||||
@@ -206,7 +209,7 @@ async function ensureUrlAllowed(
|
||||
|
||||
if (isIP(hostname)) {
|
||||
if (isBlockedIp(hostname)) {
|
||||
console.warn("[remote-asset] blocked ip", {
|
||||
logger.warn("blocked ip", {
|
||||
url: url.toString(),
|
||||
reason: "private_ip",
|
||||
});
|
||||
@@ -222,7 +225,7 @@ async function ensureUrlAllowed(
|
||||
try {
|
||||
records = await dnsLookup(hostname, { all: true });
|
||||
} catch (err) {
|
||||
console.warn("[remote-asset] dns error", {
|
||||
logger.warn("dns error", {
|
||||
url: url.toString(),
|
||||
reason: err instanceof Error ? err.message : "dns_error",
|
||||
});
|
||||
@@ -233,7 +236,7 @@ async function ensureUrlAllowed(
|
||||
}
|
||||
|
||||
if (!records || records.length === 0) {
|
||||
console.warn("[remote-asset] dns error", {
|
||||
logger.warn("dns error", {
|
||||
url: url.toString(),
|
||||
reason: "no_records",
|
||||
});
|
||||
@@ -241,7 +244,7 @@ async function ensureUrlAllowed(
|
||||
}
|
||||
|
||||
if (records.some((record) => isBlockedIp(record.address))) {
|
||||
console.warn("[remote-asset] blocked ip", {
|
||||
logger.warn("blocked ip", {
|
||||
url: url.toString(),
|
||||
reason: "private_ip",
|
||||
});
|
||||
|
||||
lib/fetch.ts · 14 lines changed
@@ -1,3 +1,7 @@
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "fetch" });
|
||||
|
||||
/**
|
||||
* Fetch a trusted upstream resource with a timeout and optional retries.
|
||||
* Do not use this for user-controlled URLs; prefer the hardened remote asset helper.
|
||||
@@ -26,6 +30,14 @@ export async function fetchWithTimeoutAndRetry(
|
||||
throw err instanceof Error ? err : new Error("fetch aborted");
|
||||
}
|
||||
if (attempt < retries) {
|
||||
logger.warn(
|
||||
`fetch failed, retrying (attempt ${attempt + 1}/${retries})`,
|
||||
{
|
||||
url: input.toString(),
|
||||
error: err,
|
||||
},
|
||||
);
|
||||
|
||||
// Simple linear backoff — good enough for trusted upstream retry logic.
|
||||
await new Promise((r) => setTimeout(r, backoffMs));
|
||||
}
|
||||
@@ -150,7 +162,7 @@ function createAbortSignal(
|
||||
fn();
|
||||
} catch (err) {
|
||||
// Ignore cleanup errors to ensure all cleanup functions run
|
||||
console.warn("Cleanup error:", err);
|
||||
logger.error("cleanup error", err);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
lib/logger/client.ts · new file · 205 lines
@@ -0,0 +1,205 @@
|
||||
"use client";
|
||||
|
||||
import { getOrGenerateClientCorrelationId } from "@/lib/logger/correlation";
|
||||
import {
|
||||
createLogEntry,
|
||||
formatLogEntry,
|
||||
type LogContext,
|
||||
type Logger,
|
||||
type LogLevel,
|
||||
shouldLog,
|
||||
} from "@/lib/logger/index";
|
||||
|
||||
/**
|
||||
* Client-side logger with PostHog integration.
|
||||
*
|
||||
* Features:
|
||||
* - Console output for debug/info in development
|
||||
* - PostHog error tracking for exceptions
|
||||
* - Correlation ID support
|
||||
* - Browser context (user agent, viewport)
|
||||
* - Graceful degradation (never crashes)
|
||||
*/
|
||||
|
||||
// ============================================================================
|
||||
// Logger Implementation
|
||||
// ============================================================================
|
||||
|
||||
class ClientLogger implements Logger {
|
||||
private minLevel: LogLevel;
|
||||
private correlationId: string | undefined;
|
||||
|
||||
constructor(minLevel?: LogLevel) {
|
||||
// Default to environment-based level
|
||||
this.minLevel =
|
||||
minLevel || (process.env.NODE_ENV === "development" ? "debug" : "info");
|
||||
|
||||
// Initialize correlation ID (lazy loaded on first use)
|
||||
if (typeof window !== "undefined") {
|
||||
try {
|
||||
this.correlationId = getOrGenerateClientCorrelationId();
|
||||
} catch {
|
||||
// Gracefully handle any errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private log(level: LogLevel, message: string, context?: LogContext): void {
|
||||
if (!shouldLog(level, this.minLevel)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = createLogEntry(level, message, {
|
||||
context,
|
||||
correlationId: this.correlationId,
|
||||
});
|
||||
|
||||
const formatted = formatLogEntry(entry);
|
||||
|
||||
// Output to appropriate console method
|
||||
// Only log to console in development to avoid noise in production
|
||||
if (process.env.NODE_ENV === "development") {
|
||||
switch (level) {
|
||||
case "trace":
|
||||
case "debug":
|
||||
console.debug(formatted);
|
||||
break;
|
||||
case "info":
|
||||
console.info(formatted);
|
||||
break;
|
||||
case "warn":
|
||||
console.warn(formatted);
|
||||
break;
|
||||
case "error":
|
||||
case "fatal":
|
||||
console.error(formatted);
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// Logging should never crash the application
|
||||
console.error("[logger] failed to log:", err);
|
||||
}
|
||||
}
|
||||
|
||||
private logWithError(
|
||||
level: LogLevel,
|
||||
message: string,
|
||||
error?: Error | unknown,
|
||||
context?: LogContext,
|
||||
): void {
|
||||
if (!shouldLog(level, this.minLevel)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = createLogEntry(level, message, {
|
||||
context,
|
||||
error,
|
||||
correlationId: this.correlationId,
|
||||
});
|
||||
|
||||
const formatted = formatLogEntry(entry);
|
||||
|
||||
// Always output errors to console (even in production for debugging)
|
||||
console.error(formatted);
|
||||
|
||||
// Track errors in PostHog
|
||||
if ((level === "error" || level === "fatal") && error instanceof Error) {
|
||||
this.trackErrorInPostHog(error, context);
|
||||
}
|
||||
} catch (err) {
|
||||
// Logging should never crash the application
|
||||
console.error("[logger] failed to log:", err);
|
||||
}
|
||||
}
|
||||
|
||||
private trackErrorInPostHog(error: Error, context?: LogContext): void {
|
||||
try {
|
||||
// Dynamically import analytics to avoid circular dependencies
|
||||
import("@/lib/analytics/client")
|
||||
.then(({ analytics }) => {
|
||||
analytics.trackException(error, {
|
||||
...context,
|
||||
correlationId: this.correlationId,
|
||||
source: "logger",
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
// Graceful degradation - don't throw if PostHog fails
|
||||
});
|
||||
} catch {
|
||||
// Silently fail if analytics not available
|
||||
}
|
||||
}
|
||||
|
||||
trace(message: string, context?: LogContext): void {
|
||||
this.log("trace", message, context);
|
||||
}
|
||||
|
||||
debug(message: string, context?: LogContext): void {
|
||||
this.log("debug", message, context);
|
||||
}
|
||||
|
||||
info(message: string, context?: LogContext): void {
|
||||
this.log("info", message, context);
|
||||
}
|
||||
|
||||
warn(message: string, context?: LogContext): void {
|
||||
this.log("warn", message, context);
|
||||
}
|
||||
|
||||
error(message: string, error?: Error | unknown, context?: LogContext): void {
|
||||
this.logWithError("error", message, error, context);
|
||||
}
|
||||
|
||||
fatal(message: string, error?: Error | unknown, context?: LogContext): void {
|
||||
this.logWithError("fatal", message, error, context);
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Exports
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Singleton client logger instance.
|
||||
* Use this for all client-side logging.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { logger } from "@/lib/logger/client";
|
||||
*
|
||||
* logger.info("Button clicked", { button: "export" });
|
||||
* logger.error("Export failed", error, { format: "json" });
|
||||
* ```
|
||||
*/
|
||||
export const logger = new ClientLogger();
|
||||
|
||||
/**
|
||||
* Create a child logger with a specific context prefix.
|
||||
* Useful for component-specific logging.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const searchLogger = createLogger({ component: "DomainSearch" });
|
||||
* searchLogger.debug("Query submitted", { domain: "example.com" });
|
||||
* ```
|
||||
*/
|
||||
export function createLogger(baseContext: LogContext): Logger {
|
||||
return {
|
||||
trace: (message: string, context?: LogContext) =>
|
||||
logger.trace(message, { ...baseContext, ...context }),
|
||||
debug: (message: string, context?: LogContext) =>
|
||||
logger.debug(message, { ...baseContext, ...context }),
|
||||
info: (message: string, context?: LogContext) =>
|
||||
logger.info(message, { ...baseContext, ...context }),
|
||||
warn: (message: string, context?: LogContext) =>
|
||||
logger.warn(message, { ...baseContext, ...context }),
|
||||
error: (message: string, error?: Error | unknown, context?: LogContext) =>
|
||||
logger.error(message, error, { ...baseContext, ...context }),
|
||||
fatal: (message: string, error?: Error | unknown, context?: LogContext) =>
|
||||
logger.fatal(message, error, { ...baseContext, ...context }),
|
||||
};
|
||||
}
|
||||
lib/logger/correlation.ts · new file · 131 lines
@@ -0,0 +1,131 @@
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
|
||||
/**
|
||||
* Correlation ID utilities for request tracing.
|
||||
*
|
||||
* Correlation IDs allow tracking a request across multiple services and layers.
|
||||
* They are generated server-side and propagated to the client.
|
||||
*/
|
||||
|
||||
// ============================================================================
|
||||
// Constants
|
||||
// ============================================================================
|
||||
|
||||
export const CORRELATION_ID_HEADER = "x-correlation-id";
|
||||
export const CORRELATION_ID_COOKIE = "x-correlation-id";
|
||||
export const CORRELATION_ID_STORAGE_KEY = "correlationId";
|
||||
|
||||
// ============================================================================
|
||||
// Generation
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Generate a new correlation ID (UUID v4).
|
||||
*/
|
||||
export function generateCorrelationId(): string {
|
||||
return uuidv4();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract correlation ID from headers or generate a new one.
|
||||
* Server-side only.
|
||||
*/
|
||||
export function getOrGenerateCorrelationId(headers: Headers): string {
|
||||
const existing = headers.get(CORRELATION_ID_HEADER);
|
||||
return existing || generateCorrelationId();
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Client-side Storage
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Get correlation ID from localStorage (client-side only).
|
||||
* Returns undefined if not available or if running server-side.
|
||||
*/
|
||||
export function getCorrelationIdFromStorage(): string | undefined {
|
||||
if (typeof window === "undefined") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
return localStorage.getItem(CORRELATION_ID_STORAGE_KEY) || undefined;
|
||||
} catch {
|
||||
// localStorage not available or blocked
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Store correlation ID in localStorage (client-side only).
|
||||
*/
|
||||
export function setCorrelationIdInStorage(id: string): void {
|
||||
if (typeof window === "undefined") {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
localStorage.setItem(CORRELATION_ID_STORAGE_KEY, id);
|
||||
} catch {
|
||||
// localStorage not available or blocked - gracefully ignore
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get correlation ID from cookie (client-side).
|
||||
* Returns undefined if not available.
|
||||
*/
|
||||
export function getCorrelationIdFromCookie(): string | undefined {
|
||||
if (typeof document === "undefined") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const cookies = document.cookie.split(";");
|
||||
for (const cookie of cookies) {
|
||||
const trimmed = cookie.trim();
|
||||
const separatorIndex = trimmed.indexOf("=");
|
||||
|
||||
if (separatorIndex === -1) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const name = trimmed.substring(0, separatorIndex);
|
||||
if (name === CORRELATION_ID_COOKIE) {
|
||||
const value = trimmed.substring(separatorIndex + 1);
|
||||
if (value) {
|
||||
return decodeURIComponent(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Cookie parsing failed
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or generate correlation ID for client-side logging.
|
||||
* Priority: cookie > localStorage > generate new
|
||||
*/
|
||||
export function getOrGenerateClientCorrelationId(): string {
|
||||
// Try cookie first (set by server)
|
||||
const fromCookie = getCorrelationIdFromCookie();
|
||||
if (fromCookie) {
|
||||
// Store in localStorage for persistence
|
||||
setCorrelationIdInStorage(fromCookie);
|
||||
return fromCookie;
|
||||
}
|
||||
|
||||
// Try localStorage
|
||||
const fromStorage = getCorrelationIdFromStorage();
|
||||
if (fromStorage) {
|
||||
return fromStorage;
|
||||
}
|
||||
|
||||
// Generate new and store
|
||||
const newId = generateCorrelationId();
|
||||
setCorrelationIdInStorage(newId);
|
||||
return newId;
|
||||
}
|
||||
lib/logger/index.test.ts · new file · 115 lines
@@ -0,0 +1,115 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
createLogEntry,
|
||||
formatLogEntry,
|
||||
getMinLogLevel,
|
||||
parseLogLevel,
|
||||
serializeError,
|
||||
shouldLog,
|
||||
} from "./index";
|
||||
|
||||
describe("Logger Core", () => {
|
||||
describe("shouldLog", () => {
|
||||
it("filters logs based on minimum level", () => {
|
||||
expect(shouldLog("trace", "info")).toBe(false);
|
||||
expect(shouldLog("debug", "info")).toBe(false);
|
||||
expect(shouldLog("info", "info")).toBe(true);
|
||||
expect(shouldLog("warn", "info")).toBe(true);
|
||||
expect(shouldLog("error", "info")).toBe(true);
|
||||
expect(shouldLog("fatal", "info")).toBe(true);
|
||||
});
|
||||
|
||||
it("respects environment-based minimum level", () => {
|
||||
// In test environment, min level should be "warn"
|
||||
expect(getMinLogLevel()).toBe("warn");
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseLogLevel", () => {
|
||||
it("returns the level if valid", () => {
|
||||
expect(parseLogLevel("info")).toBe("info");
|
||||
expect(parseLogLevel("debug")).toBe("debug");
|
||||
});
|
||||
|
||||
it("returns undefined if invalid", () => {
|
||||
expect(parseLogLevel("invalid")).toBeUndefined();
|
||||
expect(parseLogLevel("")).toBeUndefined();
|
||||
expect(parseLogLevel(undefined)).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("serializeError", () => {
|
||||
it("serializes Error objects", () => {
|
||||
const error = new Error("Test error");
|
||||
error.cause = "Root cause";
|
||||
|
||||
const serialized = serializeError(error);
|
||||
expect(serialized.name).toBe("Error");
|
||||
expect(serialized.message).toBe("Test error");
|
||||
expect(serialized.stack).toBeDefined();
|
||||
expect(serialized.cause).toBe("Root cause");
|
||||
});
|
||||
|
||||
it("handles non-Error objects", () => {
|
||||
const serialized = serializeError("string error");
|
||||
expect(serialized.name).toBe("UnknownError");
|
||||
expect(serialized.message).toBe("string error");
|
||||
});
|
||||
});
|
||||
|
||||
describe("createLogEntry", () => {
|
||||
it("creates a basic log entry", () => {
|
||||
const entry = createLogEntry("info", "Test message");
|
||||
|
||||
expect(entry.level).toBe("info");
|
||||
expect(entry.message).toBe("Test message");
|
||||
expect(entry.timestamp).toBeDefined();
|
||||
expect(entry.environment).toBe("test");
|
||||
});
|
||||
|
||||
it("includes context", () => {
|
||||
const entry = createLogEntry("info", "Test", {
|
||||
context: { domain: "example.com" },
|
||||
});
|
||||
|
||||
expect(entry.context).toEqual({
|
||||
domain: "example.com",
|
||||
});
|
||||
});
|
||||
|
||||
it("includes serialized error", () => {
|
||||
const error = new Error("Test error");
|
||||
const entry = createLogEntry("error", "Error occurred", { error });
|
||||
|
||||
expect(entry.error?.name).toBe("Error");
|
||||
expect(entry.error?.message).toBe("Test error");
|
||||
});
|
||||
|
||||
it("includes correlation and trace IDs", () => {
|
||||
const entry = createLogEntry("info", "Test", {
|
||||
correlationId: "corr-123",
|
||||
traceId: "trace-456",
|
||||
spanId: "span-789",
|
||||
});
|
||||
|
||||
expect(entry.correlationId).toBe("corr-123");
|
||||
expect(entry.traceId).toBe("trace-456");
|
||||
expect(entry.spanId).toBe("span-789");
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatLogEntry", () => {
|
||||
it("formats log entry as JSON string", () => {
|
||||
const entry = createLogEntry("info", "Test message", {
|
||||
context: { domain: "example.com" },
|
||||
});
|
||||
|
||||
const formatted = formatLogEntry(entry);
|
||||
const parsed = JSON.parse(formatted);
|
||||
|
||||
expect(parsed.level).toBe("info");
|
||||
expect(parsed.message).toBe("Test message");
|
||||
expect(parsed.context).toEqual({ domain: "example.com" });
|
||||
});
|
||||
});
|
||||
});
|
||||
lib/logger/index.ts · new file · 181 lines
@@ -0,0 +1,181 @@
|
||||
/**
|
||||
* Core Logger - Unified structured logging interface
|
||||
*
|
||||
* Provides a consistent logging API across server and client environments
|
||||
* with support for OpenTelemetry tracing and correlation IDs.
|
||||
*/
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
// ============================================================================
|
||||
|
||||
export type LogLevel = "trace" | "debug" | "info" | "warn" | "error" | "fatal";
|
||||
|
||||
export type LogContext = Record<string, unknown>;
|
||||
|
||||
export interface LogEntry {
|
||||
level: LogLevel;
|
||||
message: string;
|
||||
timestamp: string;
|
||||
context?: LogContext;
|
||||
error?: SerializedError;
|
||||
correlationId?: string;
|
||||
traceId?: string;
|
||||
spanId?: string;
|
||||
environment?: string;
|
||||
}
|
||||
|
||||
export interface SerializedError {
|
||||
name: string;
|
||||
message: string;
|
||||
stack?: string;
|
||||
cause?: unknown;
|
||||
}
|
||||
|
||||
export interface Logger {
|
||||
trace(message: string, context?: LogContext): void;
|
||||
debug(message: string, context?: LogContext): void;
|
||||
info(message: string, context?: LogContext): void;
|
||||
warn(message: string, context?: LogContext): void;
|
||||
error(message: string, error?: Error | unknown, context?: LogContext): void;
|
||||
fatal(message: string, error?: Error | unknown, context?: LogContext): void;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Constants
|
||||
// ============================================================================
|
||||
|
||||
const LOG_LEVEL_PRIORITY: Record<LogLevel, number> = {
|
||||
trace: 10,
|
||||
debug: 20,
|
||||
info: 30,
|
||||
warn: 40,
|
||||
error: 50,
|
||||
fatal: 60,
|
||||
};
|
||||
|
||||
// ============================================================================
|
||||
// Utilities
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Validate and parse a log level string.
|
||||
* Returns undefined if the level is invalid.
|
||||
*/
|
||||
export function parseLogLevel(level?: string): LogLevel | undefined {
|
||||
if (!level) return undefined;
|
||||
const validLevels: LogLevel[] = [
|
||||
"trace",
|
||||
"debug",
|
||||
"info",
|
||||
"warn",
|
||||
"error",
|
||||
"fatal",
|
||||
];
|
||||
if (validLevels.includes(level as LogLevel)) {
|
||||
return level as LogLevel;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the minimum log level based on environment.
|
||||
* - Development: debug
|
||||
* - Production: info
|
||||
* - Test: warn (to reduce noise)
|
||||
*/
|
||||
export function getMinLogLevel(): LogLevel {
|
||||
if (process.env.NODE_ENV === "test") {
|
||||
return "warn";
|
||||
}
|
||||
if (process.env.NODE_ENV === "development") {
|
||||
return "debug";
|
||||
}
|
||||
return "info";
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a log level should be emitted based on current minimum level.
|
||||
*/
|
||||
export function shouldLog(
|
||||
level: LogLevel,
|
||||
minLevel: LogLevel = getMinLogLevel(),
|
||||
): boolean {
|
||||
return LOG_LEVEL_PRIORITY[level] >= LOG_LEVEL_PRIORITY[minLevel];
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize an error object for logging.
|
||||
*/
|
||||
export function serializeError(error: unknown): SerializedError {
|
||||
if (error instanceof Error) {
|
||||
return {
|
||||
name: error.name,
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
cause: error.cause,
|
||||
};
|
||||
}
|
||||
|
||||
// Handle non-Error objects
|
||||
return {
|
||||
name: "UnknownError",
|
||||
message: String(error),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a log entry as JSON string for output.
|
||||
*/
|
||||
export function formatLogEntry(entry: LogEntry): string {
|
||||
return JSON.stringify(entry);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a structured log entry with all metadata.
|
||||
*/
|
||||
export function createLogEntry(
|
||||
level: LogLevel,
|
||||
message: string,
|
||||
options?: {
|
||||
context?: LogContext;
|
||||
error?: Error | unknown;
|
||||
correlationId?: string;
|
||||
traceId?: string;
|
||||
spanId?: string;
|
||||
},
|
||||
): LogEntry {
|
||||
const entry: LogEntry = {
|
||||
level,
|
||||
message,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Add context if present
|
||||
if (options?.context && Object.keys(options.context).length > 0) {
|
||||
entry.context = options.context;
|
||||
}
|
||||
|
||||
// Add error if present
|
||||
if (options?.error) {
|
||||
entry.error = serializeError(options.error);
|
||||
}
|
||||
|
||||
// Add correlation/trace IDs if present
|
||||
if (options?.correlationId) {
|
||||
entry.correlationId = options.correlationId;
|
||||
}
|
||||
if (options?.traceId) {
|
||||
entry.traceId = options.traceId;
|
||||
}
|
||||
if (options?.spanId) {
|
||||
entry.spanId = options.spanId;
|
||||
}
|
||||
|
||||
// Add environment
|
||||
if (process.env.NODE_ENV) {
|
||||
entry.environment = process.env.NODE_ENV;
|
||||
}
|
||||
|
||||
return entry;
|
||||
}
|
||||
lib/logger/server.ts · new file · 277 lines
@@ -0,0 +1,277 @@
|
||||
import "server-only";
|
||||
|
||||
import { context, trace } from "@opentelemetry/api";
|
||||
import { after } from "next/server";
|
||||
import {
|
||||
createLogEntry,
|
||||
formatLogEntry,
|
||||
type LogContext,
|
||||
type Logger,
|
||||
type LogLevel,
|
||||
parseLogLevel,
|
||||
shouldLog,
|
||||
} from "@/lib/logger/index";
|
||||
|
||||
/**
|
||||
* Server-side logger with OpenTelemetry integration.
|
||||
*
|
||||
* Features:
|
||||
* - OpenTelemetry trace/span ID extraction
|
||||
* - Correlation ID support
|
||||
* - PostHog integration for critical events
|
||||
* - Environment-based log level filtering
|
||||
* - Compatible with Vercel logs
|
||||
*/
|
||||
|
||||
// ============================================================================
|
||||
// Context Management
|
||||
// ============================================================================
|
||||
|
||||
// AsyncLocalStorage for correlation ID propagation
|
||||
import { AsyncLocalStorage } from "node:async_hooks";
|
||||
|
||||
const correlationIdStorage = new AsyncLocalStorage<string>();
|
||||
|
||||
/**
|
||||
* Set correlation ID for the current async context.
|
||||
* This allows propagating the ID through async operations.
|
||||
*/
|
||||
export function setCorrelationId(id: string): void {
|
||||
correlationIdStorage.enterWith(id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get correlation ID from the current async context.
|
||||
*/
|
||||
export function getCorrelationId(): string | undefined {
|
||||
return correlationIdStorage.getStore();
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a function with a specific correlation ID context.
|
||||
*/
|
||||
export function withCorrelationId<T>(id: string, fn: () => T): T {
|
||||
return correlationIdStorage.run(id, fn);
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// OpenTelemetry Integration
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Extract OpenTelemetry trace and span IDs from the current context.
|
||||
*/
|
||||
function getTraceContext(): { traceId?: string; spanId?: string } {
|
||||
try {
|
||||
const span = trace.getSpan(context.active());
|
||||
if (span) {
|
||||
const spanContext = span.spanContext();
|
||||
return {
|
||||
traceId: spanContext.traceId,
|
||||
spanId: spanContext.spanId,
|
||||
};
|
||||
}
|
||||
} catch {
|
||||
// OpenTelemetry not available or not configured
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Logger Implementation
|
||||
// ============================================================================
|
||||
|
||||
class ServerLogger implements Logger {
|
||||
private minLevel: LogLevel;
|
||||
|
||||
constructor(minLevel?: LogLevel) {
|
||||
// Default to environment-based level, but allow override
|
||||
this.minLevel =
|
||||
minLevel ||
|
||||
parseLogLevel(process.env.LOG_LEVEL) ||
|
||||
(process.env.NODE_ENV === "test"
|
||||
? "warn"
|
||||
: process.env.NODE_ENV === "development"
|
||||
? "debug"
|
||||
: "info");
|
||||
}
|
||||
|
||||
private log(level: LogLevel, message: string, context?: LogContext): void {
|
||||
if (!shouldLog(level, this.minLevel)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { traceId, spanId } = getTraceContext();
|
||||
const correlationId = getCorrelationId();
|
||||
|
||||
const entry = createLogEntry(level, message, {
|
||||
context,
|
||||
correlationId,
|
||||
traceId,
|
||||
spanId,
|
||||
});
|
||||
|
||||
const formatted = formatLogEntry(entry);
|
||||
|
||||
// Output to appropriate console method
|
||||
switch (level) {
|
||||
case "trace":
|
||||
case "debug":
|
||||
console.debug(formatted);
|
||||
break;
|
||||
case "info":
|
||||
console.info(formatted);
|
||||
break;
|
||||
case "warn":
|
||||
console.warn(formatted);
|
||||
break;
|
||||
case "error":
|
||||
case "fatal":
|
||||
console.error(formatted);
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
// Logging should never crash the application
|
||||
console.error("[logger] failed to log:", err);
|
||||
}
|
||||
}
|
||||
|
||||
private logWithError(
|
||||
level: LogLevel,
|
||||
message: string,
|
||||
error?: Error | unknown,
|
||||
context?: LogContext,
|
||||
): void {
|
||||
if (!shouldLog(level, this.minLevel)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { traceId, spanId } = getTraceContext();
|
||||
const correlationId = getCorrelationId();
|
||||
|
||||
const entry = createLogEntry(level, message, {
|
||||
context,
|
||||
error,
|
||||
correlationId,
|
||||
traceId,
|
||||
spanId,
|
||||
});
|
||||
|
||||
const formatted = formatLogEntry(entry);
|
||||
|
||||
// Output to console
|
||||
switch (level) {
|
||||
case "error":
|
||||
case "fatal":
|
||||
console.error(formatted);
|
||||
break;
|
||||
default:
|
||||
console.log(formatted);
|
||||
break;
|
||||
}
|
||||
|
||||
// Track critical errors in PostHog (async, non-blocking)
|
||||
if ((level === "error" || level === "fatal") && error instanceof Error) {
|
||||
this.trackErrorInPostHog(error, context, correlationId);
|
||||
}
|
||||
} catch (err) {
|
||||
// Logging should never crash the application
|
||||
console.error("[logger] failed to log:", err);
|
||||
}
|
||||
}
|
||||
|
||||
private trackErrorInPostHog(
|
||||
error: Error,
|
||||
context?: LogContext,
|
||||
correlationId?: string,
|
||||
): void {
|
||||
try {
|
||||
// Use after() for non-blocking PostHog tracking
|
||||
after(async () => {
|
||||
try {
|
||||
const { analytics } = await import("@/lib/analytics/server");
|
||||
analytics.trackException(error, {
|
||||
...context,
|
||||
correlationId,
|
||||
source: "logger",
|
||||
});
|
||||
} catch {
|
||||
// Graceful degradation - don't throw if PostHog fails
|
||||
}
|
||||
});
|
||||
} catch {
|
||||
// If after() not available, silently skip PostHog tracking
|
||||
}
|
||||
}
|
||||
|
||||
trace(message: string, context?: LogContext): void {
|
||||
this.log("trace", message, context);
|
||||
}
|
||||
|
||||
debug(message: string, context?: LogContext): void {
|
||||
this.log("debug", message, context);
|
||||
}
|
||||
|
||||
info(message: string, context?: LogContext): void {
|
||||
this.log("info", message, context);
|
||||
}
|
||||
|
||||
warn(message: string, context?: LogContext): void {
|
||||
this.log("warn", message, context);
|
||||
}
|
||||
|
||||
error(message: string, error?: Error | unknown, context?: LogContext): void {
|
||||
this.logWithError("error", message, error, context);
|
||||
}
|
||||
|
||||
fatal(message: string, error?: Error | unknown, context?: LogContext): void {
|
||||
this.logWithError("fatal", message, error, context);
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Exports
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Singleton server logger instance.
|
||||
* Use this for all server-side logging.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { logger } from "@/lib/logger/server";
|
||||
*
|
||||
* logger.info("User logged in", { userId: "123" });
|
||||
* logger.error("Database connection failed", error, { table: "users" });
|
||||
* ```
|
||||
*/
|
||||
export const logger = new ServerLogger();
|
||||
|
||||
/**
|
||||
* Create a child logger with a specific context prefix.
|
||||
* Useful for service-specific logging.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const dnsLogger = createLogger({ service: "dns" });
|
||||
* dnsLogger.debug("Resolving domain", { domain: "example.com" });
|
||||
* ```
|
||||
*/
|
||||
export function createLogger(baseContext: LogContext): Logger {
|
||||
return {
|
||||
trace: (message: string, context?: LogContext) =>
|
||||
logger.trace(message, { ...baseContext, ...context }),
|
||||
debug: (message: string, context?: LogContext) =>
|
||||
logger.debug(message, { ...baseContext, ...context }),
|
||||
info: (message: string, context?: LogContext) =>
|
||||
logger.info(message, { ...baseContext, ...context }),
|
||||
warn: (message: string, context?: LogContext) =>
|
||||
logger.warn(message, { ...baseContext, ...context }),
|
||||
error: (message: string, error?: Error | unknown, context?: LogContext) =>
|
||||
logger.error(message, error, { ...baseContext, ...context }),
|
||||
fatal: (message: string, error?: Error | unknown, context?: LogContext) =>
|
||||
logger.fatal(message, error, { ...baseContext, ...context }),
|
||||
};
|
||||
}
|
||||
@@ -1,5 +1,8 @@
|
||||
import "server-only";
|
||||
import type { Browser } from "puppeteer-core";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "puppeteer" });
|
||||
|
||||
let browserPromise: Promise<Browser> | null = null;
|
||||
|
||||
@@ -115,7 +118,7 @@ export function getBrowser(
|
||||
): Promise<Browser> {
|
||||
if (!browserPromise) {
|
||||
browserPromise = createBrowser(overrides).catch((err) => {
|
||||
console.error("[puppeteer] failed to create browser", err);
|
||||
logger.error("failed to create browser", err);
|
||||
// Reset promise to allow retry on next call
|
||||
browserPromise = null;
|
||||
throw err;
|
||||
@@ -130,7 +133,7 @@ export async function closeBrowser(): Promise<void> {
|
||||
const browser = await browserPromise;
|
||||
await browser.close();
|
||||
} catch (err) {
|
||||
console.error("[puppeteer] failed to close browser", err);
|
||||
logger.error("failed to close browser", err);
|
||||
} finally {
|
||||
browserPromise = null;
|
||||
}
|
||||
@@ -139,7 +142,7 @@ export async function closeBrowser(): Promise<void> {
|
||||
|
||||
if (process.env.NODE_ENV !== "test") {
|
||||
const handleShutdown = async (signal: string) => {
|
||||
console.log(`[puppeteer] received ${signal}, closing browser...`);
|
||||
logger.info(`received ${signal}, closing browser`);
|
||||
await closeBrowser();
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
@@ -3,6 +3,9 @@ import "server-only";
|
||||
import { cacheLife, cacheTag } from "next/cache";
|
||||
import type { BootstrapData } from "rdapper";
|
||||
import { RDAP_BOOTSTRAP_URL } from "@/lib/constants/external-apis";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "rdap-bootstrap" });
|
||||
|
||||
/**
|
||||
* Fetch RDAP bootstrap data with Next.js Data Cache.
|
||||
@@ -30,6 +33,6 @@ export async function getRdapBootstrapData(): Promise<BootstrapData> {
|
||||
}
|
||||
|
||||
const bootstrap = await res.json();
|
||||
console.info("[rdap-bootstrap] Bootstrap data fetched");
|
||||
logger.info("bootstrap data fetched");
|
||||
return bootstrap;
|
||||
}
|
||||
|
||||
@@ -9,6 +9,10 @@ import {
|
||||
REVALIDATE_MIN_SEO,
|
||||
} from "@/lib/constants/ttl";
|
||||
import { inngest } from "@/lib/inngest/client";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "schedule" });
|
||||
|
||||
import {
|
||||
applyDecayToTtl,
|
||||
getDecayMultiplier,
|
||||
@@ -59,9 +63,11 @@ export async function scheduleRevalidation(
|
||||
|
||||
// Check if domain should stop being revalidated due to inactivity
|
||||
if (shouldStopRevalidation(section, lastAccessedAt ?? null)) {
|
||||
console.info(
|
||||
`[schedule] skip ${section} ${normalizedDomain} (stopped: inactive ${lastAccessedAt ? `since ${lastAccessedAt.toISOString()}` : "never accessed"})`,
|
||||
);
|
||||
logger.info(`skip ${section} ${normalizedDomain} (stopped: inactive)`, {
|
||||
domain: normalizedDomain,
|
||||
section,
|
||||
lastAccessedAt: lastAccessedAt?.toISOString() ?? "never",
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -79,9 +85,12 @@ export async function scheduleRevalidation(
|
||||
const daysInactive = lastAccessedAt
|
||||
? Math.floor((now - lastAccessedAt.getTime()) / (1000 * 60 * 60 * 24))
|
||||
: null;
|
||||
console.info(
|
||||
`[schedule] decay ${section} ${normalizedDomain} (${decayMultiplier}x, inactive ${daysInactive ? `${daysInactive}d` : "unknown"})`,
|
||||
);
|
||||
logger.info(`decay ${section} ${normalizedDomain}`, {
|
||||
domain: normalizedDomain,
|
||||
section,
|
||||
decayMultiplier,
|
||||
daysInactive: daysInactive ?? "unknown",
|
||||
});
|
||||
}
|
||||
|
||||
// Validate dueAtMs before scheduling
|
||||
@@ -111,15 +120,17 @@ export async function scheduleRevalidation(
|
||||
id: eventId,
|
||||
});
|
||||
|
||||
console.debug(
|
||||
`[schedule] ok ${section} ${normalizedDomain} at ${new Date(scheduledDueMs).toISOString()}`,
|
||||
);
|
||||
logger.debug(`ok ${section} ${normalizedDomain}`, {
|
||||
domain: normalizedDomain,
|
||||
section,
|
||||
scheduledAt: new Date(scheduledDueMs).toISOString(),
|
||||
});
|
||||
return true;
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
`[schedule] failed ${section} ${normalizedDomain}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error(`failed ${section} ${normalizedDomain}`, err, {
|
||||
domain: normalizedDomain,
|
||||
section,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,11 @@ import "server-only";
|
||||
|
||||
import { putBlob } from "@/lib/blob";
|
||||
import { deterministicHash } from "@/lib/hash";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import type { StorageKind } from "@/lib/schemas";
|
||||
|
||||
const logger = createLogger({ source: "storage" });
|
||||
|
||||
const UPLOAD_MAX_ATTEMPTS = 3;
|
||||
const UPLOAD_BACKOFF_BASE_MS = 100;
|
||||
const UPLOAD_BACKOFF_MAX_MS = 2000;
|
||||
@@ -55,9 +58,9 @@ async function uploadWithRetry(
|
||||
|
||||
for (let attempt = 0; attempt < maxAttempts; attempt++) {
|
||||
try {
|
||||
console.debug(
|
||||
`[storage] upload attempt ${attempt + 1}/${maxAttempts} for ${pathname}`,
|
||||
);
|
||||
logger.debug(`upload attempt ${attempt + 1}/${maxAttempts}`, {
|
||||
pathname,
|
||||
});
|
||||
|
||||
const result = await putBlob({
|
||||
pathname,
|
||||
@@ -66,16 +69,19 @@ async function uploadWithRetry(
|
||||
cacheControlMaxAge,
|
||||
});
|
||||
|
||||
console.info(`[storage] upload ok ${pathname} (attempt ${attempt + 1})`);
|
||||
logger.info(`upload ok ${pathname}`, {
|
||||
pathname,
|
||||
attempts: attempt + 1,
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (err) {
|
||||
lastError = err instanceof Error ? err : new Error(String(err));
|
||||
|
||||
console.warn(
|
||||
`[storage] upload attempt failed ${attempt + 1}/${maxAttempts} for ${pathname}`,
|
||||
lastError,
|
||||
);
|
||||
logger.warn(`upload attempt failed ${attempt + 1}/${maxAttempts}`, {
|
||||
pathname,
|
||||
attempts: attempt + 1,
|
||||
});
|
||||
|
||||
// Don't sleep on last attempt
|
||||
if (attempt < maxAttempts - 1) {
|
||||
@@ -84,9 +90,10 @@ async function uploadWithRetry(
|
||||
UPLOAD_BACKOFF_BASE_MS,
|
||||
UPLOAD_BACKOFF_MAX_MS,
|
||||
);
|
||||
console.debug(
|
||||
`[storage] retrying after ${delay}ms delay for ${pathname}`,
|
||||
);
|
||||
logger.debug(`retrying after ${delay}ms delay`, {
|
||||
pathname,
|
||||
durationMs: delay,
|
||||
});
|
||||
await sleep(delay);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,13 +10,16 @@ import {
|
||||
} from "@/lib/db/repos/providers";
|
||||
import { certificates as certTable } from "@/lib/db/schema";
|
||||
import { toRegistrableDomain } from "@/lib/domain-server";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { detectCertificateAuthority } from "@/lib/providers/detection";
|
||||
import { scheduleRevalidation } from "@/lib/schedule";
|
||||
import type { Certificate } from "@/lib/schemas";
|
||||
import { ttlForCertificates } from "@/lib/ttl";
|
||||
|
||||
const logger = createLogger({ source: "certificates" });
|
||||
|
||||
export async function getCertificates(domain: string): Promise<Certificate[]> {
|
||||
console.debug(`[certificates] start ${domain}`);
|
||||
logger.debug(`start ${domain}`, { domain });
|
||||
|
||||
// Only support registrable domains (no subdomains, IPs, or invalid TLDs)
|
||||
const registrable = toRegistrableDomain(domain);
|
||||
@@ -64,9 +67,11 @@ export async function getCertificates(domain: string): Promise<Certificate[]> {
|
||||
caProvider: detectCertificateAuthority(c.issuer),
|
||||
}));
|
||||
|
||||
console.info(
|
||||
`[certificates] cache hit ${registrable} count=${out.length}`,
|
||||
);
|
||||
logger.info(`cache hit ${registrable}`, {
|
||||
domain: registrable,
|
||||
count: out.length,
|
||||
cached: true,
|
||||
});
|
||||
return out;
|
||||
}
|
||||
}
|
||||
@@ -177,21 +182,20 @@ export async function getCertificates(domain: string): Promise<Certificate[]> {
|
||||
dueAtMs,
|
||||
existingDomain.lastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[certificates] schedule failed for ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed", err, {
|
||||
domain: registrable,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
console.info(`[certificates] ok ${registrable} chainLength=${out.length}`);
|
||||
logger.info(`ok ${registrable}`, {
|
||||
domain: registrable,
|
||||
chainLength: out.length,
|
||||
});
|
||||
return out;
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
`[certificates] error ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error(`error ${registrable}`, err, { domain: registrable });
|
||||
// Do not treat as fatal; return empty and avoid long-lived negative cache
|
||||
return [];
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import { dnsRecords } from "@/lib/db/schema";
|
||||
import { toRegistrableDomain } from "@/lib/domain-server";
|
||||
import { fetchWithTimeoutAndRetry } from "@/lib/fetch";
|
||||
import { simpleHash } from "@/lib/hash";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { scheduleRevalidation } from "@/lib/schedule";
|
||||
import {
|
||||
type DnsRecord,
|
||||
@@ -19,6 +20,8 @@ import {
|
||||
} from "@/lib/schemas";
|
||||
import { ttlForDnsRecord } from "@/lib/ttl";
|
||||
|
||||
const logger = createLogger({ source: "dns" });
|
||||
|
||||
// ============================================================================
|
||||
// DNS resolution
|
||||
// ============================================================================
|
||||
@@ -81,7 +84,7 @@ function buildDohUrl(
|
||||
export const resolveAll = cache(async function resolveAll(
|
||||
domain: string,
|
||||
): Promise<DnsResolveResult> {
|
||||
console.debug(`[dns] start ${domain}`);
|
||||
logger.debug(`start ${domain}`, { domain });
|
||||
|
||||
const providers = providerOrderForLookup(domain);
|
||||
const durationByProvider: Record<string, number> = {};
|
||||
@@ -167,9 +170,11 @@ export const resolveAll = cache(async function resolveAll(
|
||||
const deduplicated = deduplicateDnsRecords(assembled);
|
||||
const sorted = sortDnsRecordsByType(deduplicated, types);
|
||||
if (allFreshAcrossTypes) {
|
||||
console.info(
|
||||
`[dns] cache hit ${registrable} types=${freshTypes.join(",")}`,
|
||||
);
|
||||
logger.info(`cache hit ${registrable}`, {
|
||||
domain: registrable,
|
||||
types: freshTypes.join(","),
|
||||
cached: true,
|
||||
});
|
||||
return { records: sorted, resolver: resolverHint };
|
||||
}
|
||||
|
||||
@@ -244,10 +249,10 @@ export const resolveAll = cache(async function resolveAll(
|
||||
soonest,
|
||||
existingDomain.lastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[dns] schedule failed partial ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed partial", err, {
|
||||
domain: registrable,
|
||||
type: "partial",
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -279,19 +284,22 @@ export const resolveAll = cache(async function resolveAll(
|
||||
{ A: 0, AAAA: 0, MX: 0, TXT: 0, NS: 0 } as Record<DnsType, number>,
|
||||
);
|
||||
|
||||
console.info(
|
||||
`[dns] ok partial ${registrable} counts=${JSON.stringify(counts)} resolver=${pinnedProvider.key} duration=${durationByProvider[pinnedProvider.key]}ms`,
|
||||
);
|
||||
logger.info(`ok partial ${registrable}`, {
|
||||
domain: registrable,
|
||||
counts,
|
||||
resolver: pinnedProvider.key,
|
||||
durationMs: durationByProvider[pinnedProvider.key],
|
||||
});
|
||||
return {
|
||||
records: merged,
|
||||
resolver: pinnedProvider.key,
|
||||
} as DnsResolveResult;
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
`[dns] partial refresh failed ${registrable} provider=${pinnedProvider.key}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
// Fall through to full provider loop below
|
||||
logger.error("partial refresh failed", err, {
|
||||
domain: registrable,
|
||||
provider: pinnedProvider.key,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -376,26 +384,29 @@ export const resolveAll = cache(async function resolveAll(
|
||||
soonest,
|
||||
existingDomain.lastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[dns] schedule failed full ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed full", err, {
|
||||
domain: registrable,
|
||||
type: "full",
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
console.info(
|
||||
`[dns] ok ${registrable} counts=${JSON.stringify(counts)} resolver=${resolverUsed} durations=${JSON.stringify(durationByProvider)}`,
|
||||
);
|
||||
logger.info(`ok ${registrable}`, {
|
||||
domain: registrable,
|
||||
counts,
|
||||
resolver: resolverUsed,
|
||||
durationByProvider,
|
||||
});
|
||||
// Deduplicate records before returning (same logic as replaceDns uses for DB persistence)
|
||||
const deduplicated = deduplicateDnsRecords(flat);
|
||||
// Sort records deterministically to match cache-path ordering
|
||||
const sorted = sortDnsRecordsByType(deduplicated, types);
|
||||
return { records: sorted, resolver: resolverUsed } as DnsResolveResult;
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
`[dns] provider attempt failed ${registrable} provider=${provider.key}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.warn(`provider attempt failed ${registrable}`, {
|
||||
domain: registrable,
|
||||
provider: provider.key,
|
||||
});
|
||||
durationByProvider[provider.key] = Date.now() - attemptStart;
|
||||
lastError = err;
|
||||
// Try next provider in rotation
|
||||
@@ -403,13 +414,14 @@ export const resolveAll = cache(async function resolveAll(
|
||||
}
|
||||
|
||||
// All providers failed
|
||||
console.error(
|
||||
`[dns] all providers failed ${registrable} tried=${providers.map((p) => p.key).join(",")}`,
|
||||
lastError,
|
||||
);
|
||||
throw new Error(
|
||||
const error = new Error(
|
||||
`All DoH providers failed for ${registrable}: ${String(lastError)}`,
|
||||
);
|
||||
logger.error(`all providers failed ${registrable}`, error, {
|
||||
domain: registrable,
|
||||
providers: providers.map((p) => p.key).join(","),
|
||||
});
|
||||
throw error;
|
||||
});
|
||||
|
||||
async function resolveTypeWithProvider(
|
||||
|
||||
@@ -5,9 +5,12 @@ import { getFaviconByDomain, upsertFavicon } from "@/lib/db/repos/favicons";
|
||||
import { toRegistrableDomain } from "@/lib/domain-server";
|
||||
import { fetchRemoteAsset, RemoteAssetError } from "@/lib/fetch-remote-asset";
|
||||
import { convertBufferToImageCover } from "@/lib/image";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { storeImage } from "@/lib/storage";
|
||||
import { ttlForFavicon } from "@/lib/ttl";
|
||||
|
||||
const logger = createLogger({ source: "favicon" });
|
||||
|
||||
const DEFAULT_SIZE = 32;
|
||||
const REQUEST_TIMEOUT_MS = 1500; // per each method
|
||||
const MAX_FAVICON_BYTES = 1 * 1024 * 1024; // 1MB
|
||||
@@ -37,7 +40,7 @@ async function fetchFaviconInternal(
|
||||
): Promise<{ url: string | null }> {
|
||||
// Check for in-flight request across all SSR contexts
|
||||
if (faviconPromises.has(registrable)) {
|
||||
console.debug("[favicon] in-flight request hit");
|
||||
logger.debug("in-flight request hit", { domain: registrable });
|
||||
// biome-ignore lint/style/noNonNullAssertion: checked above
|
||||
return faviconPromises.get(registrable)!;
|
||||
}
|
||||
@@ -57,9 +60,10 @@ async function fetchFaviconInternal(
|
||||
// Safety: Auto-cleanup stale promise after timeout
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (faviconPromises.get(registrable) === promise) {
|
||||
console.warn(
|
||||
`[favicon] cleaning up stale promise for ${registrable} after ${PROMISE_CLEANUP_TIMEOUT_MS}ms`,
|
||||
);
|
||||
logger.warn(`cleaning up stale promise for ${registrable}`, {
|
||||
domain: registrable,
|
||||
timeoutMs: PROMISE_CLEANUP_TIMEOUT_MS,
|
||||
});
|
||||
faviconPromises.delete(registrable);
|
||||
}
|
||||
}, PROMISE_CLEANUP_TIMEOUT_MS);
|
||||
@@ -87,15 +91,12 @@ async function fetchFaviconWork(
|
||||
faviconRecord.url !== null || faviconRecord.notFound === true;
|
||||
|
||||
if (isDefinitiveResult) {
|
||||
console.debug("[favicon] db cache hit");
|
||||
logger.debug("db cache hit", { domain: registrable, cached: true });
|
||||
return { url: faviconRecord.url };
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
"[favicon] db read failed",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("db read failed", err, { domain: registrable });
|
||||
}
|
||||
|
||||
// Generate favicon (cache missed)
|
||||
@@ -159,10 +160,7 @@ async function fetchFaviconWork(
|
||||
expiresAt,
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(
|
||||
"[favicon] db persist error",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("db persist error", err, { domain: registrable });
|
||||
}
|
||||
|
||||
return { url };
|
||||
@@ -200,10 +198,7 @@ async function fetchFaviconWork(
|
||||
expiresAt,
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(
|
||||
"[favicon] db persist error (null)",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("db persist error (null)", err, { domain: registrable });
|
||||
}
|
||||
|
||||
return { url: null };
|
||||
|
||||
@@ -158,11 +158,6 @@ describe("probeHeaders", () => {
|
||||
throw enotfoundError;
|
||||
});
|
||||
|
||||
const consoleSpy = vi.spyOn(console, "debug").mockImplementation(() => {});
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, "error")
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const { probeHeaders } = await import("./headers");
|
||||
const out = await probeHeaders("no-web-hosting.invalid");
|
||||
|
||||
@@ -170,17 +165,10 @@ describe("probeHeaders", () => {
|
||||
expect(out.headers.length).toBe(0);
|
||||
expect(out.status).toBe(0);
|
||||
|
||||
// Should log as debug (not error) since this is expected
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("[headers] no web hosting"),
|
||||
);
|
||||
|
||||
// Should NOT log as error
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
// Note: Logger calls are tested by integration - the service calls logger.debug()
|
||||
// which is mocked in vitest.setup.ts to not actually log anything
|
||||
|
||||
fetchMock.mockRestore();
|
||||
consoleSpy.mockRestore();
|
||||
consoleErrorSpy.mockRestore();
|
||||
});
|
||||
|
||||
it("logs actual errors (non-DNS) as errors", async () => {
|
||||
@@ -191,13 +179,6 @@ describe("probeHeaders", () => {
|
||||
throw realError;
|
||||
});
|
||||
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, "error")
|
||||
.mockImplementation(() => {});
|
||||
const consoleDebugSpy = vi
|
||||
.spyOn(console, "debug")
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const { probeHeaders } = await import("./headers");
|
||||
const out = await probeHeaders("timeout.invalid");
|
||||
|
||||
@@ -205,19 +186,9 @@ describe("probeHeaders", () => {
|
||||
expect(out.headers.length).toBe(0);
|
||||
expect(out.status).toBe(0);
|
||||
|
||||
// Should log as error since this is unexpected
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("[headers] error"),
|
||||
realError,
|
||||
);
|
||||
|
||||
// Should NOT log as debug (no web hosting)
|
||||
expect(consoleDebugSpy).not.toHaveBeenCalledWith(
|
||||
expect.stringContaining("[headers] no web hosting"),
|
||||
);
|
||||
// Note: Logger calls are tested by integration - the service calls logger.error()
|
||||
// which is mocked in vitest.setup.ts to not actually log anything
|
||||
|
||||
fetchMock.mockRestore();
|
||||
consoleErrorSpy.mockRestore();
|
||||
consoleDebugSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -9,10 +9,13 @@ import { replaceHeaders } from "@/lib/db/repos/headers";
|
||||
import { httpHeaders } from "@/lib/db/schema";
|
||||
import { toRegistrableDomain } from "@/lib/domain-server";
|
||||
import { fetchWithSelectiveRedirects } from "@/lib/fetch";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { scheduleRevalidation } from "@/lib/schedule";
|
||||
import type { HttpHeader, HttpHeadersResponse } from "@/lib/schemas";
|
||||
import { ttlForHeaders } from "@/lib/ttl";
|
||||
|
||||
const logger = createLogger({ source: "headers" });
|
||||
|
||||
/**
|
||||
* Probe HTTP headers for a domain with Postgres caching.
|
||||
*
|
||||
@@ -24,7 +27,7 @@ export const probeHeaders = cache(async function probeHeaders(
|
||||
domain: string,
|
||||
): Promise<HttpHeadersResponse> {
|
||||
const url = `https://${domain}/`;
|
||||
console.debug(`[headers] start ${domain}`);
|
||||
logger.debug(`start ${domain}`, { domain });
|
||||
|
||||
// Only support registrable domains (no subdomains, IPs, or invalid TLDs)
|
||||
const registrable = toRegistrableDomain(domain);
|
||||
@@ -62,9 +65,12 @@ export const probeHeaders = cache(async function probeHeaders(
|
||||
statusMessage = undefined;
|
||||
}
|
||||
|
||||
console.info(
|
||||
`[headers] cache hit ${registrable} status=${row.status} count=${normalized.length}`,
|
||||
);
|
||||
logger.info(`cache hit ${registrable}`, {
|
||||
domain: registrable,
|
||||
status: row.status,
|
||||
count: normalized.length,
|
||||
cached: true,
|
||||
});
|
||||
return { headers: normalized, status: row.status, statusMessage };
|
||||
}
|
||||
|
||||
@@ -104,16 +110,17 @@ export const probeHeaders = cache(async function probeHeaders(
|
||||
dueAtMs,
|
||||
existingDomain.lastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[headers] schedule failed for ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed", err, {
|
||||
domain: registrable,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
console.info(
|
||||
`[headers] ok ${registrable} status=${final.status} count=${normalized.length}`,
|
||||
);
|
||||
logger.info(`ok ${registrable}`, {
|
||||
domain: registrable,
|
||||
status: final.status,
|
||||
count: normalized.length,
|
||||
});
|
||||
|
||||
// Get status message
|
||||
let statusMessage: string | undefined;
|
||||
@@ -130,14 +137,11 @@ export const probeHeaders = cache(async function probeHeaders(
|
||||
const isDnsError = isExpectedDnsError(err);
|
||||
|
||||
if (isDnsError) {
|
||||
console.debug(
|
||||
`[headers] no web hosting ${registrable} (no A/AAAA records)`,
|
||||
);
|
||||
logger.debug(`no web hosting ${registrable} (no A/AAAA records)`, {
|
||||
domain: registrable,
|
||||
});
|
||||
} else {
|
||||
console.error(
|
||||
`[headers] error ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error(`error ${registrable}`, err, { domain: registrable });
|
||||
}
|
||||
|
||||
// Return empty on failure without caching to avoid long-lived negatives
|
||||
|
||||
@@ -14,6 +14,7 @@ import {
|
||||
providers as providersTable,
|
||||
} from "@/lib/db/schema";
|
||||
import { toRegistrableDomain } from "@/lib/domain-server";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import {
|
||||
detectDnsProvider,
|
||||
detectEmailProvider,
|
||||
@@ -26,6 +27,8 @@ import { resolveAll } from "@/server/services/dns";
|
||||
import { probeHeaders } from "@/server/services/headers";
|
||||
import { lookupIpMeta } from "@/server/services/ip";
|
||||
|
||||
const logger = createLogger({ source: "hosting" });
|
||||
|
||||
/**
|
||||
* Detect hosting, email, and DNS providers for a domain with Postgres caching.
|
||||
*
|
||||
@@ -36,7 +39,7 @@ import { lookupIpMeta } from "@/server/services/ip";
|
||||
export const detectHosting = cache(async function detectHosting(
|
||||
domain: string,
|
||||
): Promise<Hosting> {
|
||||
console.debug(`[hosting] start ${domain}`);
|
||||
logger.debug(`start ${domain}`, { domain });
|
||||
|
||||
// Only support registrable domains (no subdomains, IPs, or invalid TLDs)
|
||||
const registrable = toRegistrableDomain(domain);
|
||||
@@ -100,9 +103,13 @@ export const detectHosting = cache(async function detectHosting(
|
||||
lon: row.geoLon ?? null,
|
||||
},
|
||||
};
|
||||
console.info(
|
||||
`[hosting] cache hit ${domain} hosting=${info.hostingProvider.name} email=${info.emailProvider.name} dns=${info.dnsProvider.name}`,
|
||||
);
|
||||
logger.info(`cache hit ${domain}`, {
|
||||
domain,
|
||||
hosting: info.hostingProvider.name,
|
||||
email: info.emailProvider.name,
|
||||
dns: info.dnsProvider.name,
|
||||
cached: true,
|
||||
});
|
||||
return info;
|
||||
}
|
||||
}
|
||||
@@ -119,10 +126,7 @@ export const detectHosting = cache(async function detectHosting(
|
||||
const [headersResponse, meta] = await Promise.all([
|
||||
hasWebHosting
|
||||
? probeHeaders(domain).catch((err) => {
|
||||
console.error(
|
||||
`[hosting] headers probe error ${domain}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error(`headers probe error ${domain}`, err, { domain });
|
||||
return {
|
||||
headers: [] as { name: string; value: string }[],
|
||||
status: 0,
|
||||
@@ -274,15 +278,17 @@ export const detectHosting = cache(async function detectHosting(
|
||||
dueAtMs,
|
||||
existingDomain.lastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[hosting] schedule failed for ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed", err, {
|
||||
domain: registrable,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
console.info(
|
||||
`[hosting] ok ${registrable} hosting=${hostingName} email=${emailName} dns=${dnsName}`,
|
||||
);
|
||||
logger.info(`ok ${registrable}`, {
|
||||
domain: registrable,
|
||||
hosting: hostingName,
|
||||
email: emailName,
|
||||
dns: dnsName,
|
||||
});
|
||||
return info;
|
||||
});
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import { cache } from "react";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
|
||||
const logger = createLogger({ source: "ip" });
|
||||
|
||||
/**
|
||||
* Lookup IP metadata including geolocation and ownership information.
|
||||
@@ -21,7 +24,7 @@ export const lookupIpMeta = cache(async function lookupIpMeta(
|
||||
owner: string | null;
|
||||
domain: string | null;
|
||||
}> {
|
||||
console.debug(`[ip] start lookup for ${ip}`);
|
||||
logger.debug(`start lookup for ${ip}`, { type: "ip" });
|
||||
try {
|
||||
// Add timeout to prevent hanging requests to upstream IP service
|
||||
const controller = new AbortController();
|
||||
@@ -34,7 +37,11 @@ export const lookupIpMeta = cache(async function lookupIpMeta(
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
if (!res.ok) {
|
||||
console.error(`[ip] error looking up ${ip}`, res.statusText);
|
||||
logger.error(`error looking up ${ip}`, undefined, {
|
||||
type: "ip",
|
||||
status: res.status,
|
||||
statusMessage: res.statusText,
|
||||
});
|
||||
throw new Error(`Upstream error looking up IP metadata: ${res.status}`);
|
||||
}
|
||||
|
||||
@@ -91,9 +98,11 @@ export const lookupIpMeta = cache(async function lookupIpMeta(
|
||||
lon: typeof data.longitude === "number" ? data.longitude : null,
|
||||
};
|
||||
|
||||
console.info(
|
||||
`[ip] ok ${ip} owner=${owner || "none"} domain=${domain || "none"}`,
|
||||
);
|
||||
logger.info(`ok ${ip}`, {
|
||||
type: "ip",
|
||||
owner: owner || "none",
|
||||
domain: domain || "none",
|
||||
});
|
||||
|
||||
return { geo, owner, domain };
|
||||
} catch (fetchErr) {
|
||||
@@ -102,10 +111,7 @@ export const lookupIpMeta = cache(async function lookupIpMeta(
|
||||
throw fetchErr;
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
`[ip] error looking up ${ip}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error(`error looking up ${ip}`, err, { type: "ip" });
|
||||
return {
|
||||
owner: null,
|
||||
domain: null,
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
import { cacheLife, cacheTag } from "next/cache";
|
||||
import { getDomainTld } from "rdapper";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import type { Pricing } from "@/lib/schemas";
|
||||
|
||||
const logger = createLogger({ source: "pricing" });
|
||||
|
||||
/**
|
||||
* Domain registration pricing service.
|
||||
*
|
||||
@@ -50,13 +53,12 @@ async function fetchProviderPricing(
|
||||
): Promise<RegistrarPricingResponse | null> {
|
||||
try {
|
||||
const payload = await provider.fetchPricing();
|
||||
console.info(`[pricing] fetch ok ${provider.name}`);
|
||||
logger.info(`fetch ok ${provider.name}`, { provider: provider.name });
|
||||
return payload;
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`[pricing] fetch error ${provider.name}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error(`fetch error ${provider.name}`, err, {
|
||||
provider: provider.name,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -90,7 +92,10 @@ const porkbunProvider: PricingProvider = {
|
||||
);
|
||||
|
||||
if (!res.ok) {
|
||||
console.error(`[pricing] upstream error porkbun status=${res.status}`);
|
||||
logger.error(`upstream error porkbun status=${res.status}`, undefined, {
|
||||
provider: "porkbun",
|
||||
status: res.status,
|
||||
});
|
||||
throw new Error(`Porkbun API returned ${res.status}`);
|
||||
}
|
||||
|
||||
@@ -101,7 +106,7 @@ const porkbunProvider: PricingProvider = {
|
||||
} catch (err) {
|
||||
// Translate AbortError into a retryable timeout error
|
||||
if (err instanceof Error && err.name === "AbortError") {
|
||||
console.error("[pricing] upstream timeout porkbun");
|
||||
logger.error("upstream timeout porkbun", err, { provider: "porkbun" });
|
||||
throw new Error("Porkbun API request timed out");
|
||||
}
|
||||
throw err;
|
||||
@@ -136,8 +141,13 @@ const cloudflareProvider: PricingProvider = {
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
console.error(
|
||||
`[pricing] upstream error cloudflare status=${res.status}`,
|
||||
logger.error(
|
||||
`upstream error cloudflare status=${res.status}`,
|
||||
undefined,
|
||||
{
|
||||
provider: "cloudflare",
|
||||
status: res.status,
|
||||
},
|
||||
);
|
||||
throw new Error(`Cloudflare pricing API returned ${res.status}`);
|
||||
}
|
||||
@@ -166,7 +176,9 @@ const cloudflareProvider: PricingProvider = {
|
||||
} catch (err) {
|
||||
// Translate AbortError into a retryable timeout error
|
||||
if (err instanceof Error && err.name === "AbortError") {
|
||||
console.error("[pricing] upstream timeout cloudflare");
|
||||
logger.error("upstream timeout cloudflare", err, {
|
||||
provider: "cloudflare",
|
||||
});
|
||||
throw new Error("Cloudflare pricing API request timed out");
|
||||
}
|
||||
throw err;
|
||||
|
||||
@@ -222,11 +222,6 @@ describe("getRegistration", () => {
|
||||
record: null,
|
||||
});
|
||||
|
||||
const consoleSpy = vi.spyOn(console, "info").mockImplementation(() => {});
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, "error")
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const { getRegistration } = await import("./registration");
|
||||
const rec = await getRegistration("whois.ls");
|
||||
|
||||
@@ -238,16 +233,8 @@ describe("getRegistration", () => {
|
||||
expect(rec.registrarProvider.name).toBeNull();
|
||||
expect(rec.registrarProvider.domain).toBeNull();
|
||||
|
||||
// Should log as info (not error) since this is a known limitation
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("[registration] unavailable"),
|
||||
);
|
||||
|
||||
// Should NOT log as error
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
|
||||
consoleSpy.mockRestore();
|
||||
consoleErrorSpy.mockRestore();
|
||||
// Note: Logger calls are tested by integration - the service calls logger.info()
|
||||
// which is mocked in vitest.setup.ts to not actually log anything
|
||||
});
|
||||
|
||||
it("handles TLDs with unresponsive WHOIS servers gracefully (timeout)", async () => {
|
||||
@@ -260,11 +247,6 @@ describe("getRegistration", () => {
|
||||
record: null,
|
||||
});
|
||||
|
||||
const consoleSpy = vi.spyOn(console, "info").mockImplementation(() => {});
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, "error")
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const { getRegistration } = await import("./registration");
|
||||
const rec = await getRegistration("timeout.ls");
|
||||
|
||||
@@ -274,16 +256,8 @@ describe("getRegistration", () => {
|
||||
expect(rec.isRegistered).toBe(false);
|
||||
expect(rec.source).toBeNull();
|
||||
|
||||
// Should log as info (not error) since timeouts indicate unavailable WHOIS
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("[registration] unavailable"),
|
||||
);
|
||||
|
||||
// Should NOT log as error
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
|
||||
consoleSpy.mockRestore();
|
||||
consoleErrorSpy.mockRestore();
|
||||
// Note: Logger calls are tested by integration - the service calls logger.info()
|
||||
// which is mocked in vitest.setup.ts to not actually log anything
|
||||
});
|
||||
|
||||
it("logs actual registration errors as errors (timeout, network failure)", async () => {
|
||||
@@ -296,13 +270,6 @@ describe("getRegistration", () => {
|
||||
record: null,
|
||||
});
|
||||
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, "error")
|
||||
.mockImplementation(() => {});
|
||||
const consoleInfoSpy = vi
|
||||
.spyOn(console, "info")
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const { getRegistration } = await import("./registration");
|
||||
|
||||
// Should throw error
|
||||
@@ -310,18 +277,7 @@ describe("getRegistration", () => {
|
||||
"Registration lookup failed for timeout.test: Connection timeout after 5000ms",
|
||||
);
|
||||
|
||||
// Should log as error since this is unexpected
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("[registration] error"),
|
||||
expect.any(Error),
|
||||
);
|
||||
|
||||
// Should NOT log as info (unavailable)
|
||||
expect(consoleInfoSpy).not.toHaveBeenCalledWith(
|
||||
expect.stringContaining("[registration] unavailable"),
|
||||
);
|
||||
|
||||
consoleErrorSpy.mockRestore();
|
||||
consoleInfoSpy.mockRestore();
|
||||
// Note: Logger calls are tested by integration - the service calls logger.error()
|
||||
// which is mocked in vitest.setup.ts to not actually log anything
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,12 +7,15 @@ import { resolveOrCreateProviderId } from "@/lib/db/repos/providers";
|
||||
import { upsertRegistration } from "@/lib/db/repos/registrations";
|
||||
import { domains, providers, registrations } from "@/lib/db/schema";
|
||||
import { toRegistrableDomain } from "@/lib/domain-server";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { detectRegistrar } from "@/lib/providers/detection";
|
||||
import { getRdapBootstrapData } from "@/lib/rdap-bootstrap";
|
||||
import { scheduleRevalidation } from "@/lib/schedule";
|
||||
import type { Registration, RegistrationContacts } from "@/lib/schemas";
|
||||
import { ttlForRegistration } from "@/lib/ttl";
|
||||
|
||||
const logger = createLogger({ source: "registration" });
|
||||
|
||||
/**
|
||||
* Normalize registrar provider information from raw rdapper data.
|
||||
* Applies provider detection and falls back to URL hostname parsing.
|
||||
@@ -52,7 +55,7 @@ function normalizeRegistrar(registrar?: { name?: unknown; url?: unknown }): {
|
||||
* Fetch domain registration using rdapper and cache the normalized DomainRecord.
|
||||
*/
|
||||
export async function getRegistration(domain: string): Promise<Registration> {
|
||||
console.debug(`[registration] start ${domain}`);
|
||||
logger.debug(`start ${domain}`, { domain });
|
||||
|
||||
// Only support registrable domains (no subdomains, IPs, or invalid TLDs)
|
||||
const registrable = toRegistrableDomain(domain);
|
||||
@@ -123,16 +126,18 @@ export async function getRegistration(domain: string): Promise<Registration> {
|
||||
row.registration.expiresAt.getTime(),
|
||||
row.domainLastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[registration] schedule failed for ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed", err, {
|
||||
domain: registrable,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
console.info(
|
||||
`[registration] ok cached ${registrable} registered=${row.registration.isRegistered} registrar=${registrarProvider.name}`,
|
||||
);
|
||||
logger.info(`ok cached ${registrable}`, {
|
||||
domain: registrable,
|
||||
isRegistered: row.registration.isRegistered,
|
||||
registrar: registrarProvider.name,
|
||||
cached: true,
|
||||
});
|
||||
|
||||
return response;
|
||||
}
|
||||
@@ -151,9 +156,10 @@ export async function getRegistration(domain: string): Promise<Registration> {
|
||||
const isKnownLimitation = isExpectedRegistrationError(error);
|
||||
|
||||
if (isKnownLimitation) {
|
||||
console.info(
|
||||
`[registration] unavailable ${registrable} reason=${error || "unknown"}`,
|
||||
);
|
||||
logger.info("unavailable", {
|
||||
domain: registrable,
|
||||
reason: error || "unknown",
|
||||
});
|
||||
|
||||
// Return minimal unregistered response for TLDs without WHOIS/RDAP
|
||||
// (We can't determine registration status without WHOIS/RDAP access)
|
||||
@@ -173,18 +179,16 @@ export async function getRegistration(domain: string): Promise<Registration> {
|
||||
const err = new Error(
|
||||
`Registration lookup failed for ${registrable}: ${error || "unknown error"}`,
|
||||
);
|
||||
console.error(
|
||||
`[registration] error ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("lookup failed", err, { domain: registrable });
|
||||
throw err;
|
||||
}
|
||||
|
||||
// If unregistered, return response without persisting to Postgres
|
||||
if (!record.isRegistered) {
|
||||
console.info(
|
||||
`[registration] ok ${registrable} unregistered (not persisted)`,
|
||||
);
|
||||
logger.info(`ok ${registrable} unregistered (not persisted)`, {
|
||||
domain: registrable,
|
||||
isRegistered: false,
|
||||
});
|
||||
|
||||
const registrarProvider = normalizeRegistrar(record.registrar ?? {});
|
||||
|
||||
@@ -301,16 +305,17 @@ export async function getRegistration(domain: string): Promise<Registration> {
|
||||
expiresAt.getTime(),
|
||||
domainRecord.lastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[registration] schedule failed for ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed", err, {
|
||||
domain: registrable,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
console.info(
|
||||
`[registration] ok ${registrable} registered=${record.isRegistered} registrar=${withProvider.registrarProvider.name}`,
|
||||
);
|
||||
logger.info(`ok ${registrable}`, {
|
||||
domain: registrable,
|
||||
isRegistered: record.isRegistered,
|
||||
registrar: withProvider.registrarProvider.name,
|
||||
});
|
||||
|
||||
return withProvider;
|
||||
}
|
||||
|
||||
@@ -7,10 +7,13 @@ import {
|
||||
} from "@/lib/db/repos/screenshots";
|
||||
import { toRegistrableDomain } from "@/lib/domain-server";
|
||||
import { addWatermarkToScreenshot, optimizeImageCover } from "@/lib/image";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { getBrowser } from "@/lib/puppeteer";
|
||||
import { storeImage } from "@/lib/storage";
|
||||
import { ttlForScreenshot } from "@/lib/ttl";
|
||||
|
||||
const logger = createLogger({ source: "screenshot" });
|
||||
|
||||
const VIEWPORT_WIDTH = 1200;
|
||||
const VIEWPORT_HEIGHT = 630;
|
||||
const NAV_TIMEOUT_MS = 8000;
|
||||
@@ -60,7 +63,7 @@ export async function getOrCreateScreenshotBlobUrl(
|
||||
|
||||
// Check for in-flight request
|
||||
if (screenshotPromises.has(registrable)) {
|
||||
console.debug("[screenshot] in-flight request hit");
|
||||
logger.debug("in-flight request hit", { domain: registrable });
|
||||
// biome-ignore lint/style/noNonNullAssertion: checked above
|
||||
return screenshotPromises.get(registrable)!;
|
||||
}
|
||||
@@ -82,9 +85,10 @@ export async function getOrCreateScreenshotBlobUrl(
|
||||
// This catches edge cases where promise never settles
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (screenshotPromises.get(registrable) === promise) {
|
||||
console.warn(
|
||||
`[screenshot] cleaning up stale promise for ${registrable} after ${PROMISE_CLEANUP_TIMEOUT_MS}ms`,
|
||||
);
|
||||
logger.warn(`cleaning up stale promise for ${registrable}`, {
|
||||
domain: registrable,
|
||||
timeoutMs: PROMISE_CLEANUP_TIMEOUT_MS,
|
||||
});
|
||||
screenshotPromises.delete(registrable);
|
||||
}
|
||||
}, PROMISE_CLEANUP_TIMEOUT_MS);
|
||||
@@ -94,9 +98,9 @@ export async function getOrCreateScreenshotBlobUrl(
|
||||
|
||||
// Log map size for monitoring
|
||||
if (screenshotPromises.size > 10) {
|
||||
console.warn(
|
||||
`[screenshot] promise map size: ${screenshotPromises.size} (potential memory pressure)`,
|
||||
);
|
||||
logger.warn("promise map size high (potential memory pressure)", {
|
||||
count: screenshotPromises.size,
|
||||
});
|
||||
}
|
||||
|
||||
return promise;
|
||||
@@ -131,16 +135,13 @@ async function generateScreenshot(
|
||||
screenshotRecord.url !== null || screenshotRecord.notFound === true;
|
||||
|
||||
if (isDefinitiveResult) {
|
||||
console.debug("[screenshot] db cache hit");
|
||||
logger.debug("db cache hit", { domain: registrable, cached: true });
|
||||
return { url: screenshotRecord.url };
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
"[screenshot] db read failed",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("db read failed", err, { domain: registrable });
|
||||
}
|
||||
|
||||
// Generate screenshot (cache missed)
|
||||
@@ -224,10 +225,7 @@ async function generateScreenshot(
|
||||
expiresAt,
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(
|
||||
"[screenshot] db persist error",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("db persist error", err, { domain: registrable });
|
||||
}
|
||||
|
||||
resultUrl = storedUrl;
|
||||
@@ -247,10 +245,9 @@ async function generateScreenshot(
|
||||
try {
|
||||
await page.close();
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
"[screenshot] failed to close page",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("failed to close page", err, {
|
||||
domain: registrable,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -279,10 +276,7 @@ async function generateScreenshot(
|
||||
expiresAt,
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(
|
||||
"[screenshot] db persist error (null)",
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("db persist error (null)", err, { domain: registrable });
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
} from "@/lib/fetch";
|
||||
import { fetchRemoteAsset } from "@/lib/fetch-remote-asset";
|
||||
import { optimizeImageCover } from "@/lib/image";
|
||||
import { createLogger } from "@/lib/logger/server";
|
||||
import { scheduleRevalidation } from "@/lib/schedule";
|
||||
import type {
|
||||
GeneralMeta,
|
||||
@@ -24,12 +25,14 @@ import { parseHtmlMeta, parseRobotsTxt, selectPreview } from "@/lib/seo";
|
||||
import { storeImage } from "@/lib/storage";
|
||||
import { ttlForSeo } from "@/lib/ttl";
|
||||
|
||||
const logger = createLogger({ source: "seo" });
|
||||
|
||||
const SOCIAL_WIDTH = 1200;
|
||||
const SOCIAL_HEIGHT = 630;
|
||||
const MAX_REMOTE_IMAGE_BYTES = 5 * 1024 * 1024; // 5MB
|
||||
|
||||
export async function getSeo(domain: string): Promise<SeoResponse> {
|
||||
console.debug(`[seo] start ${domain}`);
|
||||
logger.debug(`start ${domain}`, { domain });
|
||||
|
||||
// Only support registrable domains (no subdomains, IPs, or invalid TLDs)
|
||||
const registrable = toRegistrableDomain(domain);
|
||||
@@ -274,17 +277,20 @@ export async function getSeo(domain: string): Promise<SeoResponse> {
|
||||
dueAtMs,
|
||||
existingDomain.lastAccessedAt ?? null,
|
||||
).catch((err) => {
|
||||
console.warn(
|
||||
`[seo] schedule failed for ${registrable}`,
|
||||
err instanceof Error ? err : new Error(String(err)),
|
||||
);
|
||||
logger.error("schedule failed", err, {
|
||||
domain: registrable,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
console.info(
|
||||
`[seo] ok ${registrable} status=${status ?? -1} has_meta=${!!meta} has_robots=${!!robots} has_errors=${Boolean(htmlError || robotsError)}`,
|
||||
);
|
||||
logger.info(`ok ${registrable}`, {
|
||||
domain: registrable,
|
||||
status: status ?? -1,
|
||||
has_meta: !!meta,
|
||||
has_robots: !!robots,
|
||||
has_errors: Boolean(htmlError || robotsError),
|
||||
});
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
trpc/init.ts
@@ -4,6 +4,8 @@ import { after } from "next/server";
import superjson from "superjson";
import { updateLastAccessed } from "@/lib/db/repos/domains";
import { toRegistrableDomain } from "@/lib/domain-server";
import { getOrGenerateCorrelationId } from "@/lib/logger/correlation";
import { setCorrelationId } from "@/lib/logger/server";

const IP_HEADERS = ["x-real-ip", "x-forwarded-for", "cf-connecting-ip"];

@@ -31,7 +33,18 @@ export const createContext = async (opts?: { req?: Request }) => {
  const req = opts?.req;
  const ip = await resolveRequestIp();

  return { req, ip } as const;
  // Generate or extract correlation ID for request tracing
  let correlationId: string | undefined;
  try {
    const headerList = await headers();
    correlationId = getOrGenerateCorrelationId(headerList);
    // Set in AsyncLocalStorage for propagation
    setCorrelationId(correlationId);
  } catch {
    // headers() not available (tests/scripts)
  }

  return { req, ip, correlationId } as const;
};

export type Context = Awaited<ReturnType<typeof createContext>>;
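As an illustrative sketch only (not part of this diff), the correlation ID captured above could be re-applied to deferred work so background logs stay joinable with the originating request. This assumes `withCorrelationId(id, fn)` runs `fn` inside the given correlation scope, which is what the `vitest.setup.ts` mock later in this diff implies:

```typescript
import { after } from "next/server";
import { logger, withCorrelationId } from "@/lib/logger/server";

// Hypothetical helper: re-enter the request's correlation scope for deferred work.
function runAfterRequest(correlationId: string | undefined, work: () => Promise<void>) {
  after(async () => {
    if (!correlationId) {
      await work();
      return;
    }
    await withCorrelationId(correlationId, async () => {
      logger.info("deferred work started");
      await work();
    });
  });
}
```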
@@ -48,63 +61,32 @@ export const createCallerFactory = t.createCallerFactory;
|
||||
|
||||
/**
|
||||
* Middleware to log the start, end, and duration of a procedure.
|
||||
* All logs are structured JSON for better parsing by log aggregators.
|
||||
* All logs are structured JSON with OpenTelemetry tracing and correlation IDs.
|
||||
* Errors are tracked in PostHog for centralized monitoring.
|
||||
* @param path - The path of the procedure
|
||||
* @param type - The type of the procedure
|
||||
* @param input - The input to the procedure
|
||||
* @param next - The next middleware
|
||||
* @returns The result of the next middleware
|
||||
*/
|
||||
const withLogging = t.middleware(async ({ path, type, input, next }) => {
|
||||
const start = performance.now();
|
||||
|
||||
// Sample input for debugging (only log safe fields, avoid PII)
|
||||
const inputSample =
|
||||
input && typeof input === "object"
|
||||
? Object.keys(input).reduce(
|
||||
(acc, key) => {
|
||||
// Log only safe fields, truncate long values
|
||||
if (
|
||||
key === "domain" ||
|
||||
key === "type" ||
|
||||
key === "types" ||
|
||||
key === "limit"
|
||||
) {
|
||||
const value = (input as Record<string, unknown>)[key];
|
||||
acc[key] = String(value).slice(0, 100);
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string>,
|
||||
)
|
||||
: undefined;
|
||||
// Import logger (dynamic to avoid circular deps)
|
||||
const { logger } = await import("@/lib/logger/server");
|
||||
|
||||
console.debug(
|
||||
JSON.stringify({
|
||||
level: "debug",
|
||||
message: "[trpc] start",
|
||||
path,
|
||||
type,
|
||||
input: inputSample,
|
||||
timestamp: new Date().toISOString(),
|
||||
}),
|
||||
);
|
||||
// Log procedure start
|
||||
logger.debug(`[trpc] start ${path}`, {
|
||||
path,
|
||||
type,
|
||||
...(input && typeof input === "object" ? { ...input } : {}),
|
||||
});
|
||||
|
||||
try {
|
||||
const result = await next();
|
||||
const durationMs = Math.round(performance.now() - start);
|
||||
|
||||
console.info(
|
||||
JSON.stringify({
|
||||
level: "info",
|
||||
message: "[trpc] ok",
|
||||
path,
|
||||
type,
|
||||
durationMs,
|
||||
timestamp: new Date().toISOString(),
|
||||
}),
|
||||
);
|
||||
// Log successful completion
|
||||
logger.info(`[trpc] ok ${path}`, {
|
||||
path,
|
||||
type,
|
||||
durationMs,
|
||||
});
|
||||
|
||||
// Track slow requests (>5s threshold) in PostHog
|
||||
if (durationMs > 5000) {
|
||||
@@ -121,30 +103,11 @@ const withLogging = t.middleware(async ({ path, type, input, next }) => {
|
||||
const durationMs = Math.round(performance.now() - start);
|
||||
const error = err instanceof Error ? err : new Error(String(err));
|
||||
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
level: "error",
|
||||
message: "[trpc] error",
|
||||
path,
|
||||
type,
|
||||
durationMs,
|
||||
timestamp: new Date().toISOString(),
|
||||
error: {
|
||||
message: error.message,
|
||||
name: error.name,
|
||||
stack: error.stack,
|
||||
cause: error.cause,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
// Track exceptions in PostHog for centralized monitoring
|
||||
const { analytics } = await import("@/lib/analytics/server");
|
||||
analytics.trackException(error, {
|
||||
// Log error with full details
|
||||
logger.error(`[trpc] error ${path}`, error, {
|
||||
path,
|
||||
type,
|
||||
durationMs,
|
||||
source: "trpc",
|
||||
});
|
||||
|
||||
throw err;
|
||||
@@ -166,14 +129,10 @@ const withDomainAccessUpdate = t.middleware(async ({ input, next }) => {
|
||||
) {
|
||||
const registrable = toRegistrableDomain(input.domain);
|
||||
if (registrable) {
|
||||
console.debug(
|
||||
JSON.stringify({
|
||||
level: "debug",
|
||||
message: "[trpc] recording access for domain",
|
||||
domain: registrable,
|
||||
timestamp: new Date().toISOString(),
|
||||
}),
|
||||
);
|
||||
const { logger } = await import("@/lib/logger/server");
|
||||
logger.debug("[trpc] recording access for domain", {
|
||||
domain: registrable,
|
||||
});
|
||||
after(() => updateLastAccessed(registrable));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,6 +22,48 @@ vi.mock("@/lib/analytics/client", () => ({
// Make server-only a no-op so we can import server modules in tests
vi.mock("server-only", () => ({}));

// Mock logger to avoid noise in tests
vi.mock("@/lib/logger/server", () => ({
  logger: {
    trace: vi.fn(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    fatal: vi.fn(),
  },
  createLogger: vi.fn(() => ({
    trace: vi.fn(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    fatal: vi.fn(),
  })),
  setCorrelationId: vi.fn(),
  getCorrelationId: vi.fn(() => undefined),
  withCorrelationId: vi.fn((_id: string, fn: () => unknown) => fn()),
}));

vi.mock("@/lib/logger/client", () => ({
  logger: {
    trace: vi.fn(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    fatal: vi.fn(),
  },
  createLogger: vi.fn(() => ({
    trace: vi.fn(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    fatal: vi.fn(),
  })),
}));
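If a test ever needs to assert on log output directly rather than relying on these integration-level mocks, one possible pattern (a sketch, not part of this change) is to assert against the mocked singleton. Loggers returned by the mocked `createLogger` above are fresh `vi.fn()` instances, so the shared `logger` export is the easier target:

```typescript
import { expect, it, vi } from "vitest";
import { logger } from "@/lib/logger/server"; // resolves to the mock defined above

it("records an error through the mocked singleton", () => {
  // Stand-in for a call made by the code under test.
  logger.error("lookup failed", new Error("timeout"), { domain: "example.com" });

  expect(vi.mocked(logger.error)).toHaveBeenCalledWith(
    "lookup failed",
    expect.any(Error),
    expect.objectContaining({ domain: "example.com" }),
  );
});
```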
// Mock Next.js after() to execute callbacks immediately in tests
// In production, after() schedules work after the response is sent
vi.mock("next/server", async () => {