From b88fc8ead79d7ec5c4ee2c370554ba99fc9cf650 Mon Sep 17 00:00:00 2001
From: Claude
Date: Wed, 19 Nov 2025 00:04:28 +0000
Subject: [PATCH] feat: add Next.js metrics dashboard for real-time visualization

Add a lightweight Next.js dashboard to visualize OpenProxy metrics in
real-time. The dashboard provides comprehensive insights into LLM API
usage, costs, and performance.

Features:
- Real-time metrics overview (requests, tokens, costs, response times)
- Model breakdown with usage statistics
- Hourly trends visualization with charts
- Recent requests table with detailed information
- Auto-refresh every 30 seconds
- Configurable time ranges (1h, 6h, 24h, 7d)

Technical details:
- Built with Next.js 14 and React 18
- Uses Recharts for data visualization
- Connects directly to PostgreSQL database
- Runs on port 3008 by default
- TypeScript for type safety
- Minimal dependencies for lightweight deployment

The dashboard complements the proxy server by providing a user-friendly
interface for monitoring and analyzing LLM API usage patterns.
---
 README.md                                |  21 +++
 dashboard/.env.example                   |   5 +
 dashboard/.gitignore                     |  34 ++++
 dashboard/README.md                      | 195 ++++++++++++++++++++
 dashboard/app/api/metrics/route.ts       | 119 ++++++++++++
 dashboard/app/layout.tsx                 |  33 ++++
 dashboard/app/page.tsx                   | 221 +++++++++++++++++++++++
 dashboard/components/MetricsOverview.tsx | 103 +++++++++++
 dashboard/components/ModelBreakdown.tsx  |  99 ++++++++++
 dashboard/components/RecentRequests.tsx  | 166 +++++++++++++++++
 dashboard/components/TrendsChart.tsx     | 160 ++++++++++++++++
 dashboard/next.config.js                 |   6 +
 dashboard/package.json                   |  26 +++
 dashboard/tsconfig.json                  |  27 +++
 14 files changed, 1215 insertions(+)
 create mode 100644 dashboard/.env.example
 create mode 100644 dashboard/.gitignore
 create mode 100644 dashboard/README.md
 create mode 100644 dashboard/app/api/metrics/route.ts
 create mode 100644 dashboard/app/layout.tsx
 create mode 100644 dashboard/app/page.tsx
 create mode 100644 dashboard/components/MetricsOverview.tsx
 create mode 100644 dashboard/components/ModelBreakdown.tsx
 create mode 100644 dashboard/components/RecentRequests.tsx
 create mode 100644 dashboard/components/TrendsChart.tsx
 create mode 100644 dashboard/next.config.js
 create mode 100644 dashboard/package.json
 create mode 100644 dashboard/tsconfig.json

diff --git a/README.md b/README.md
index d8007ed..37cec3b 100644
--- a/README.md
+++ b/README.md
@@ -148,6 +148,27 @@ Monitor your API usage through the PostgreSQL logs:
 - Identify usage patterns and optimize costs
 - Maintain compliance with audit requirements
 
+### Metrics Dashboard
+
+OpenProxy includes a lightweight Next.js dashboard for real-time metrics visualization:
+
+```bash
+cd dashboard
+npm install
+cp .env.example .env
+# Configure DATABASE_URL in .env
+npm run dev
+```
+
+The dashboard (available at `http://localhost:3008`) provides:
+- **Real-time Overview**: Total requests, tokens, costs, and response times
+- **Model Breakdown**: Usage statistics grouped by LLM model
+- **Hourly Trends**: Visual charts showing request patterns over time
+- **Recent Requests**: Detailed table of recent API calls
+- **Auto-refresh**: Automatic updates every 30 seconds
+
+See [dashboard/README.md](./dashboard/README.md) for detailed setup instructions.
+
 ## 🤝 Contributing
 
 Feel free to submit issues and enhancement requests!
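For reference while reading the diffs below: the dashboard only reads rows that the proxy has already written to PostgreSQL. A minimal sketch of the row shape the queries in `dashboard/app/api/metrics/route.ts` assume — the column names come from the SELECT lists and component prop types in this patch, while the interface itself (`LlmProxyRow`) and the exact TypeScript types are an approximation, not part of the patch:

```typescript
// Hypothetical row shape for the `llm_proxy` table, inferred from the columns
// selected in dashboard/app/api/metrics/route.ts and the component prop types.
// The real schema is owned by the proxy server; types here are approximate.
interface LlmProxyRow {
  request_id: string;
  timestamp: string;        // serialized to an ISO string in the API's JSON response
  model: string;
  prompt_tokens: number;
  completion_tokens: number;
  total_tokens: number;
  total_cost: string;       // NUMERIC columns come back as strings from the `pg` driver
  response_time: number;    // milliseconds
  response_status: number;  // HTTP status code
  client_ip: string;
  stream: boolean;          // whether the request was streamed
}
```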
diff --git a/dashboard/.env.example b/dashboard/.env.example
new file mode 100644
index 0000000..9c0bc76
--- /dev/null
+++ b/dashboard/.env.example
@@ -0,0 +1,5 @@
+# PostgreSQL connection string (same as proxy server)
+DATABASE_URL=postgresql://user:password@localhost:5432/database
+
+# Database table name (default: llm_proxy)
+DATABASE_TABLE=llm_proxy
diff --git a/dashboard/.gitignore b/dashboard/.gitignore
new file mode 100644
index 0000000..8ccc874
--- /dev/null
+++ b/dashboard/.gitignore
@@ -0,0 +1,34 @@
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# local env files
+.env*.local
+.env
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/dashboard/README.md b/dashboard/README.md
new file mode 100644
index 0000000..49016ca
--- /dev/null
+++ b/dashboard/README.md
@@ -0,0 +1,195 @@
+# OpenProxy Metrics Dashboard
+
+A lightweight Next.js dashboard for visualizing OpenProxy LLM request metrics in real-time.
+
+## Features
+
+- **Real-time Metrics Overview**: Total requests, tokens, costs, and response times
+- **Model Breakdown**: Usage statistics grouped by LLM model
+- **Hourly Trends**: Visual charts showing request patterns over time
+- **Recent Requests**: Detailed table of recent API calls
+- **Auto-refresh**: Automatic updates every 30 seconds
+- **Time Range Selection**: View metrics for the last hour, 6 hours, 24 hours, or 7 days
+
+## Prerequisites
+
+- Node.js 18 or higher
+- PostgreSQL database (same as the proxy server)
+- OpenProxy proxy server running
+
+## Installation
+
+1. Navigate to the dashboard directory:
+   ```bash
+   cd dashboard
+   ```
+
+2. Install dependencies:
+   ```bash
+   npm install
+   ```
+
+3. Create a `.env` file (copy from `.env.example`):
+   ```bash
+   cp .env.example .env
+   ```
+
+4. Configure your `.env` file:
+   ```env
+   DATABASE_URL=postgresql://user:password@localhost:5432/database
+   DATABASE_TABLE=llm_proxy
+   ```
+
+## Running the Dashboard
+
+### Development Mode
+
+```bash
+npm run dev
+```
+
+The dashboard will be available at `http://localhost:3008`.
+
+### Production Mode
+
+1. Build the application:
+   ```bash
+   npm run build
+   ```
+
+2. Start the production server:
+   ```bash
+   npm start
+   ```
+
+## Dashboard Sections
+
+### 1. Overview Cards
+Displays key metrics at a glance:
+- Total requests processed
+- Total tokens consumed
+- Total cost incurred
+- Average response time
+- Number of unique models used
+- Number of unique client IPs
+
+### 2. Hourly Trends
+Two charts showing:
+- Request count and average response time over time
+- Token usage and costs over time
+
+### 3. Model Breakdown
+Table showing per-model statistics:
+- Request count
+- Total tokens used
+- Total cost
+- Average response time
+
+### 4. Recent Requests
+Detailed table of recent API calls showing:
+- Timestamp
+- Model used
+- Token breakdown (prompt + completion = total)
+- Cost
+- Response time
+- HTTP status code
+- Client IP address
+- Whether the request was streamed
+
+## Configuration
+
+### Port
+The dashboard runs on port 3008 by default. To change this, modify the `dev` and `start` scripts in `package.json`:
+
+```json
+"dev": "next dev -p YOUR_PORT",
+"start": "next start -p YOUR_PORT"
+```
+
+### Database Connection
+Ensure the `DATABASE_URL` in your `.env` file matches the PostgreSQL connection string used by the proxy server.
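If the connection string is in doubt, a quick check with the same `pg` driver the dashboard already depends on confirms both connectivity and that the configured table is reachable. A minimal sketch, assuming the default table name; the `scripts/check-db.ts` filename and the use of `tsx` as a runner are only suggestions, not part of this patch:

```typescript
// scripts/check-db.ts — hypothetical helper, not part of this patch.
// Verifies DATABASE_URL and that the configured table can be queried.
import { Pool } from 'pg';

const pool = new Pool({ connectionString: process.env.DATABASE_URL });
const table = process.env.DATABASE_TABLE || 'llm_proxy';

async function main() {
  const result = await pool.query(`SELECT COUNT(*) AS row_count FROM ${table}`);
  console.log(`Connected. ${result.rows[0].row_count} rows in "${table}".`);
  await pool.end();
}

main().catch((err) => {
  console.error('Database check failed:', err);
  process.exit(1);
});
```

Run it with something like `npx tsx scripts/check-db.ts` after loading the same `.env` the dashboard uses.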
+
+### Time Ranges
+Available time ranges:
+- Last Hour (1 hour)
+- Last 6 Hours
+- Last 24 Hours (default)
+- Last 7 Days (168 hours)
+
+## Troubleshooting
+
+### "Failed to fetch metrics" Error
+- Verify that the `DATABASE_URL` in `.env` is correct
+- Ensure PostgreSQL is running and accessible
+- Check that the `llm_proxy` table exists in your database
+- Verify network connectivity to the database
+
+### Empty Dashboard
+- Ensure the proxy server is running and processing requests
+- Verify that requests are being logged to the database
+- Check that the `DATABASE_TABLE` name matches your configuration
+
+### Port Conflicts
+If port 3008 is already in use, change the port in the `package.json` scripts.
+
+## Technology Stack
+
+- **Framework**: Next.js 14 (React 18)
+- **Charts**: Recharts
+- **Database**: PostgreSQL (via `pg` driver)
+- **Language**: TypeScript
+- **Styling**: Inline CSS (no external dependencies)
+
+## Architecture
+
+```
+dashboard/
+├── app/
+│   ├── api/
+│   │   └── metrics/
+│   │       └── route.ts      # API endpoint for fetching metrics
+│   ├── layout.tsx            # Root layout
+│   └── page.tsx              # Main dashboard page
+├── components/
+│   ├── MetricsOverview.tsx   # Overview cards component
+│   ├── ModelBreakdown.tsx    # Model statistics table
+│   ├── RecentRequests.tsx    # Recent requests table
+│   └── TrendsChart.tsx       # Hourly trends charts
+├── package.json
+├── tsconfig.json
+├── next.config.js
+└── README.md
+```
+
+## API Endpoints
+
+### GET `/api/metrics`
+
+Query parameters:
+- `hours` (optional): Number of hours to look back (default: 24)
+- `limit` (optional): Maximum number of recent requests to return (default: 100)
+
+Response:
+```json
+{
+  "success": true,
+  "data": {
+    "summary": {
+      "totalRequests": 1234,
+      "totalTokens": 567890,
+      "totalCost": 12.34,
+      "avgResponseTime": 450.5,
+      "uniqueModels": 3,
+      "uniqueClients": 15
+    },
+    "recentRequests": [...],
+    "modelBreakdown": [...],
+    "hourlyTrends": [...]
+  },
+  "timeRange": "24 hours"
+}
+```
+
+## License
+
+Same as OpenProxy parent project.
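The `/api/metrics` endpoint documented above can also be consumed outside the dashboard UI, for example from a monitoring script. A small hedged example of calling it with the documented query parameters; the host and port assume the default `npm run dev` setup, and the `MetricsResponse` interface is a local approximation of the route's JSON shape rather than a type exported by this patch:

```typescript
// Hypothetical consumer of GET /api/metrics — not part of this patch.
// Requires Node 18+ for the global fetch API.
interface MetricsResponse {
  success: boolean;
  data?: {
    summary: {
      totalRequests: number;
      totalTokens: number;
      totalCost: number;
      avgResponseTime: number;
      uniqueModels: number;
      uniqueClients: number;
    };
  };
  error?: string;
  timeRange?: string;
}

async function fetchSummary(hours = 24, limit = 50): Promise<void> {
  const url = `http://localhost:3008/api/metrics?hours=${hours}&limit=${limit}`;
  const res = await fetch(url);
  const body = (await res.json()) as MetricsResponse;

  if (!body.success || !body.data) {
    throw new Error(body.error ?? 'Failed to fetch metrics');
  }
  const { summary } = body.data;
  console.log(`${summary.totalRequests} requests, $${summary.totalCost.toFixed(4)} in the last ${hours}h`);
}

fetchSummary().catch(console.error);
```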
diff --git a/dashboard/app/api/metrics/route.ts b/dashboard/app/api/metrics/route.ts
new file mode 100644
index 0000000..d431166
--- /dev/null
+++ b/dashboard/app/api/metrics/route.ts
@@ -0,0 +1,119 @@
+import { NextRequest, NextResponse } from 'next/server';
+import { Pool } from 'pg';
+
+const pool = new Pool({
+  connectionString: process.env.DATABASE_URL,
+});
+
+const TABLE_NAME = process.env.DATABASE_TABLE || 'llm_proxy';
+
+export async function GET(request: NextRequest) {
+  const { searchParams } = new URL(request.url);
+  const hours = parseInt(searchParams.get('hours') || '24', 10);
+  const limit = parseInt(searchParams.get('limit') || '100', 10);
+
+  try {
+    const client = await pool.connect();
+
+    try {
+      // Get summary statistics
+      const summaryQuery = `
+        SELECT
+          COUNT(*) as total_requests,
+          SUM(total_tokens) as total_tokens_used,
+          SUM(total_cost) as total_cost,
+          AVG(response_time) as avg_response_time,
+          COUNT(DISTINCT model) as unique_models,
+          COUNT(DISTINCT client_ip) as unique_clients
+        FROM ${TABLE_NAME}
+        WHERE timestamp >= NOW() - INTERVAL '${hours} hours'
+      `;
+      const summaryResult = await client.query(summaryQuery);
+      const summary = summaryResult.rows[0];
+
+      // Get recent requests
+      const recentQuery = `
+        SELECT
+          request_id,
+          timestamp,
+          model,
+          prompt_tokens,
+          completion_tokens,
+          total_tokens,
+          total_cost,
+          response_time,
+          response_status,
+          client_ip,
+          stream
+        FROM ${TABLE_NAME}
+        WHERE timestamp >= NOW() - INTERVAL '${hours} hours'
+        ORDER BY timestamp DESC
+        LIMIT ${limit}
+      `;
+      const recentResult = await client.query(recentQuery);
+      const recentRequests = recentResult.rows;
+
+      // Get model breakdown
+      const modelQuery = `
+        SELECT
+          model,
+          COUNT(*) as request_count,
+          SUM(total_tokens) as total_tokens,
+          SUM(total_cost) as total_cost,
+          AVG(response_time) as avg_response_time
+        FROM ${TABLE_NAME}
+        WHERE timestamp >= NOW() - INTERVAL '${hours} hours'
+        GROUP BY model
+        ORDER BY request_count DESC
+      `;
+      const modelResult = await client.query(modelQuery);
+      const modelBreakdown = modelResult.rows;
+
+      // Get hourly trends
+      const trendsQuery = `
+        SELECT
+          DATE_TRUNC('hour', timestamp) as hour,
+          COUNT(*) as requests,
+          SUM(total_tokens) as tokens,
+          SUM(total_cost) as cost,
+          AVG(response_time) as avg_response_time
+        FROM ${TABLE_NAME}
+        WHERE timestamp >= NOW() - INTERVAL '${hours} hours'
+        GROUP BY hour
+        ORDER BY hour ASC
+      `;
+      const trendsResult = await client.query(trendsQuery);
+      const hourlyTrends = trendsResult.rows;
+
+      return NextResponse.json({
+        success: true,
+        data: {
+          summary: {
+            totalRequests: parseInt(summary.total_requests || '0'),
+            totalTokens: parseInt(summary.total_tokens_used || '0'),
+            totalCost: parseFloat(summary.total_cost || '0'),
+            avgResponseTime: parseFloat(summary.avg_response_time || '0'),
+            uniqueModels: parseInt(summary.unique_models || '0'),
+            uniqueClients: parseInt(summary.unique_clients || '0'),
+          },
+          recentRequests,
+          modelBreakdown,
+          hourlyTrends,
+        },
+        timeRange: `${hours} hours`,
+      });
+    } finally {
+      client.release();
+    }
+  } catch (error) {
+    console.error('Database error:', error);
+    return NextResponse.json(
+      {
+        success: false,
+        error: 'Failed to fetch metrics',
+        details: error instanceof Error ? error.message : 'Unknown error'
+      },
+      { status: 500 }
+    );
+  }
+}
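One thing worth flagging in the route above: `hours` and `limit` are interpolated directly into the SQL strings. `parseInt` keeps arbitrary strings out, but a request like `?hours=abc` yields `NaN` and produces invalid SQL. A hedged sketch of one way to harden this — clamping the parsed values and binding them with `pg` parameter placeholders — shown purely as an illustration, not as part of the patch; it reuses `TABLE_NAME`, `client`, and `searchParams` from the route above:

```typescript
// Sketch only — not part of the patch. TABLE_NAME still has to be interpolated
// (identifiers cannot be bound), so it should keep coming from trusted
// configuration, as it does in the route above.
function clampInt(raw: string | null, fallback: number, max: number): number {
  const n = parseInt(raw ?? '', 10);
  return Number.isFinite(n) ? Math.min(Math.max(n, 1), max) : fallback;
}

const hours = clampInt(searchParams.get('hours'), 24, 24 * 30); // cap at 30 days
const limit = clampInt(searchParams.get('limit'), 100, 1000);   // cap result size

const recentQuery = `
  SELECT request_id, timestamp, model, total_tokens, total_cost
  FROM ${TABLE_NAME}
  WHERE timestamp >= NOW() - make_interval(hours => $1)
  ORDER BY timestamp DESC
  LIMIT $2
`;
const recentResult = await client.query(recentQuery, [hours, limit]);
```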
diff --git a/dashboard/app/layout.tsx b/dashboard/app/layout.tsx
new file mode 100644
index 0000000..5f9562a
--- /dev/null
+++ b/dashboard/app/layout.tsx
@@ -0,0 +1,33 @@
+import type { Metadata } from 'next'
+
+export const metadata: Metadata = {
+  title: 'OpenProxy Metrics Dashboard',
+  description: 'Real-time metrics and analytics for OpenProxy LLM requests',
+}
+
+export default function RootLayout({
+  children,
+}: {
+  children: React.ReactNode
+}) {
+  return (
+    <html lang="en">
+      <body>
+        {children}
+      </body>
+    </html>
+  )
+}
diff --git a/dashboard/app/page.tsx b/dashboard/app/page.tsx
new file mode 100644
index 0000000..331a627
--- /dev/null
+++ b/dashboard/app/page.tsx
@@ -0,0 +1,221 @@
+'use client';
+
+import { useEffect, useState } from 'react';
+import MetricsOverview from '@/components/MetricsOverview';
+import ModelBreakdown from '@/components/ModelBreakdown';
+import RecentRequests from '@/components/RecentRequests';
+import TrendsChart from '@/components/TrendsChart';
+
+interface MetricsData {
+  summary: {
+    totalRequests: number;
+    totalTokens: number;
+    totalCost: number;
+    avgResponseTime: number;
+    uniqueModels: number;
+    uniqueClients: number;
+  };
+  recentRequests: any[];
+  modelBreakdown: any[];
+  hourlyTrends: any[];
+}
+
+export default function Dashboard() {
+  const [data, setData] = useState<MetricsData | null>(null);
+  const [loading, setLoading] = useState(true);
+  const [error, setError] = useState<string | null>(null);
+  const [timeRange, setTimeRange] = useState(24);
+  const [autoRefresh, setAutoRefresh] = useState(true);
+
+  const fetchMetrics = async () => {
+    try {
+      const response = await fetch(`/api/metrics?hours=${timeRange}`);
+      const result = await response.json();
+
+      if (result.success) {
+        setData(result.data);
+        setError(null);
+      } else {
+        setError(result.error || 'Failed to fetch metrics');
+      }
+    } catch (err) {
+      setError('Network error: Unable to fetch metrics');
+      console.error('Fetch error:', err);
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  useEffect(() => {
+    fetchMetrics();
+  }, [timeRange]);
+
+  useEffect(() => {
+    if (!autoRefresh) return;
+
+    const interval = setInterval(() => {
+      fetchMetrics();
+    }, 30000); // Refresh every 30 seconds
+
+    return () => clearInterval(interval);
+  }, [autoRefresh, timeRange]);
+
+  if (loading) {
+    return (
+
Loading metrics...
+
+ ); + } + + if (error) { + return ( +
+
+

Error

+

{error}

+ +
+
+ ); + } + + if (!data) { + return ( +
+
No data available
+
+ ); + } + + return ( +
+
+

OpenProxy Metrics Dashboard

+
+ + + +
+
+ +
+ + + + +
+ +
+

Last updated: {new Date().toLocaleString()}

+
+
+ ); +} + +const styles = { + container: { + minHeight: '100vh', + backgroundColor: '#f5f7fa', + }, + header: { + backgroundColor: '#fff', + padding: '1.5rem 2rem', + boxShadow: '0 2px 4px rgba(0,0,0,0.1)', + display: 'flex', + justifyContent: 'space-between', + alignItems: 'center', + flexWrap: 'wrap' as const, + gap: '1rem', + }, + title: { + fontSize: '1.8rem', + color: '#2c3e50', + fontWeight: 600, + }, + controls: { + display: 'flex', + gap: '1rem', + alignItems: 'center', + }, + select: { + padding: '0.5rem 1rem', + borderRadius: '6px', + border: '1px solid #ddd', + fontSize: '0.9rem', + cursor: 'pointer', + }, + checkboxLabel: { + display: 'flex', + alignItems: 'center', + gap: '0.5rem', + fontSize: '0.9rem', + }, + refreshButton: { + padding: '0.5rem 1.5rem', + backgroundColor: '#3498db', + color: '#fff', + border: 'none', + borderRadius: '6px', + cursor: 'pointer', + fontSize: '0.9rem', + fontWeight: 500, + }, + main: { + maxWidth: '1400px', + margin: '0 auto', + padding: '2rem', + }, + loading: { + display: 'flex', + justifyContent: 'center', + alignItems: 'center', + height: '100vh', + fontSize: '1.2rem', + color: '#7f8c8d', + }, + error: { + display: 'flex', + flexDirection: 'column' as const, + justifyContent: 'center', + alignItems: 'center', + height: '100vh', + gap: '1rem', + color: '#e74c3c', + }, + retryButton: { + padding: '0.5rem 1.5rem', + backgroundColor: '#e74c3c', + color: '#fff', + border: 'none', + borderRadius: '6px', + cursor: 'pointer', + fontSize: '0.9rem', + }, + footer: { + textAlign: 'center' as const, + padding: '2rem', + color: '#7f8c8d', + fontSize: '0.9rem', + }, +}; diff --git a/dashboard/components/MetricsOverview.tsx b/dashboard/components/MetricsOverview.tsx new file mode 100644 index 0000000..0562e2b --- /dev/null +++ b/dashboard/components/MetricsOverview.tsx @@ -0,0 +1,103 @@ +interface MetricsOverviewProps { + summary: { + totalRequests: number; + totalTokens: number; + totalCost: number; + avgResponseTime: number; + uniqueModels: number; + uniqueClients: number; + }; +} + +export default function MetricsOverview({ summary }: MetricsOverviewProps) { + const metrics = [ + { + label: 'Total Requests', + value: summary.totalRequests.toLocaleString(), + icon: '📊', + }, + { + label: 'Total Tokens', + value: summary.totalTokens.toLocaleString(), + icon: '🔢', + }, + { + label: 'Total Cost', + value: `$${summary.totalCost.toFixed(4)}`, + icon: '💰', + }, + { + label: 'Avg Response Time', + value: `${Math.round(summary.avgResponseTime)}ms`, + icon: '⚡', + }, + { + label: 'Unique Models', + value: summary.uniqueModels.toString(), + icon: '🤖', + }, + { + label: 'Unique Clients', + value: summary.uniqueClients.toString(), + icon: '👥', + }, + ]; + + return ( +
+

Overview

+
+ {metrics.map((metric) => ( +
+
{metric.icon}
+
+
{metric.label}
+
{metric.value}
+
+
+ ))} +
+
+ ); +} + +const styles = { + container: { + marginBottom: '2rem', + }, + title: { + fontSize: '1.5rem', + marginBottom: '1.5rem', + color: '#2c3e50', + }, + grid: { + display: 'grid', + gridTemplateColumns: 'repeat(auto-fit, minmax(250px, 1fr))', + gap: '1rem', + }, + card: { + backgroundColor: '#fff', + padding: '1.5rem', + borderRadius: '8px', + boxShadow: '0 2px 4px rgba(0,0,0,0.1)', + display: 'flex', + gap: '1rem', + alignItems: 'center', + }, + icon: { + fontSize: '2rem', + }, + content: { + flex: 1, + }, + label: { + fontSize: '0.9rem', + color: '#7f8c8d', + marginBottom: '0.25rem', + }, + value: { + fontSize: '1.5rem', + fontWeight: 600, + color: '#2c3e50', + }, +}; diff --git a/dashboard/components/ModelBreakdown.tsx b/dashboard/components/ModelBreakdown.tsx new file mode 100644 index 0000000..9a9b420 --- /dev/null +++ b/dashboard/components/ModelBreakdown.tsx @@ -0,0 +1,99 @@ +interface ModelBreakdownProps { + models: { + model: string; + request_count: string; + total_tokens: string; + total_cost: string; + avg_response_time: string; + }[]; +} + +export default function ModelBreakdown({ models }: ModelBreakdownProps) { + if (!models || models.length === 0) { + return ( +
+

Model Breakdown

+
+

No model data available

+
+
+ ); + } + + return ( +
+

Model Breakdown

+
+ + + + + + + + + + + + {models.map((model) => ( + + + + + + + + ))} + +
ModelRequestsTotal TokensTotal CostAvg Response Time
+ {model.model} + {parseInt(model.request_count).toLocaleString()}{parseInt(model.total_tokens).toLocaleString()}${parseFloat(model.total_cost).toFixed(4)}{Math.round(parseFloat(model.avg_response_time))}ms
+
+
+ ); +} + +const styles = { + container: { + marginBottom: '2rem', + }, + title: { + fontSize: '1.5rem', + marginBottom: '1.5rem', + color: '#2c3e50', + }, + card: { + backgroundColor: '#fff', + borderRadius: '8px', + boxShadow: '0 2px 4px rgba(0,0,0,0.1)', + overflow: 'hidden', + }, + table: { + width: '100%', + borderCollapse: 'collapse' as const, + }, + headerRow: { + backgroundColor: '#f8f9fa', + }, + th: { + padding: '1rem', + textAlign: 'left' as const, + fontSize: '0.9rem', + fontWeight: 600, + color: '#2c3e50', + borderBottom: '2px solid #e9ecef', + }, + row: { + borderBottom: '1px solid #e9ecef', + }, + td: { + padding: '1rem', + fontSize: '0.9rem', + color: '#495057', + }, + noData: { + padding: '2rem', + textAlign: 'center' as const, + color: '#7f8c8d', + }, +}; diff --git a/dashboard/components/RecentRequests.tsx b/dashboard/components/RecentRequests.tsx new file mode 100644 index 0000000..0fe57eb --- /dev/null +++ b/dashboard/components/RecentRequests.tsx @@ -0,0 +1,166 @@ +interface RecentRequestsProps { + requests: { + request_id: string; + timestamp: string; + model: string; + prompt_tokens: number; + completion_tokens: number; + total_tokens: number; + total_cost: string; + response_time: number; + response_status: number; + client_ip: string; + stream: boolean; + }[]; +} + +export default function RecentRequests({ requests }: RecentRequestsProps) { + if (!requests || requests.length === 0) { + return ( +
+

Recent Requests

+
+

No recent requests

+
+
+ ); + } + + return ( +
+

Recent Requests

+
+
+ + + + + + + + + + + + + + + {requests.map((req) => ( + + + + + + + + + + + ))} + +
TimestampModelTokensCostResponse TimeStatusClient IPStream
+ {new Date(req.timestamp).toLocaleString()} + + {req.model} + +
+ + {req.prompt_tokens} + {req.completion_tokens} = {req.total_tokens} + +
+
${parseFloat(req.total_cost).toFixed(4)}{req.response_time}ms + + {req.response_status} + + {req.client_ip}{req.stream ? '✓' : '✗'}
+
+
+
+ ); +} + +const styles = { + container: { + marginBottom: '2rem', + }, + title: { + fontSize: '1.5rem', + marginBottom: '1.5rem', + color: '#2c3e50', + }, + card: { + backgroundColor: '#fff', + borderRadius: '8px', + boxShadow: '0 2px 4px rgba(0,0,0,0.1)', + overflow: 'hidden', + }, + tableWrapper: { + overflowX: 'auto' as const, + }, + table: { + width: '100%', + borderCollapse: 'collapse' as const, + minWidth: '1000px', + }, + headerRow: { + backgroundColor: '#f8f9fa', + }, + th: { + padding: '1rem', + textAlign: 'left' as const, + fontSize: '0.85rem', + fontWeight: 600, + color: '#2c3e50', + borderBottom: '2px solid #e9ecef', + }, + row: { + borderBottom: '1px solid #e9ecef', + }, + td: { + padding: '0.75rem 1rem', + fontSize: '0.85rem', + color: '#495057', + }, + modelBadge: { + backgroundColor: '#e3f2fd', + color: '#1976d2', + padding: '0.25rem 0.5rem', + borderRadius: '4px', + fontSize: '0.8rem', + fontWeight: 500, + }, + tokenBreakdown: { + display: 'flex', + flexDirection: 'column' as const, + }, + tokenDetail: { + color: '#7f8c8d', + fontSize: '0.75rem', + }, + statusBadge: { + padding: '0.25rem 0.5rem', + borderRadius: '4px', + fontSize: '0.8rem', + fontWeight: 500, + }, + statusSuccess: { + backgroundColor: '#d4edda', + color: '#155724', + }, + statusError: { + backgroundColor: '#f8d7da', + color: '#721c24', + }, + noData: { + padding: '2rem', + textAlign: 'center' as const, + color: '#7f8c8d', + }, +}; diff --git a/dashboard/components/TrendsChart.tsx b/dashboard/components/TrendsChart.tsx new file mode 100644 index 0000000..67f6473 --- /dev/null +++ b/dashboard/components/TrendsChart.tsx @@ -0,0 +1,160 @@ +'use client'; + +import { + LineChart, + Line, + XAxis, + YAxis, + CartesianGrid, + Tooltip, + Legend, + ResponsiveContainer, +} from 'recharts'; + +interface TrendsChartProps { + trends: { + hour: string; + requests: string; + tokens: string; + cost: string; + avg_response_time: string; + }[]; +} + +export default function TrendsChart({ trends }: TrendsChartProps) { + if (!trends || trends.length === 0) { + return ( +
+

Hourly Trends

+
+

No trend data available

+
+
+ ); + } + + const chartData = trends.map((trend) => ({ + time: new Date(trend.hour).toLocaleString('en-US', { + month: 'short', + day: 'numeric', + hour: '2-digit', + }), + requests: parseInt(trend.requests), + tokens: parseInt(trend.tokens), + cost: parseFloat(trend.cost), + responseTime: Math.round(parseFloat(trend.avg_response_time)), + })); + + return ( +
+

Hourly Trends

+
+
+

Requests & Response Time

+ + + + + + + + + + + + +
+ +
+

Tokens & Cost

+ + + + + + + + + + + + +
+
+
+ ); +} + +const styles = { + container: { + marginBottom: '2rem', + }, + title: { + fontSize: '1.5rem', + marginBottom: '1.5rem', + color: '#2c3e50', + }, + card: { + backgroundColor: '#fff', + borderRadius: '8px', + boxShadow: '0 2px 4px rgba(0,0,0,0.1)', + padding: '1.5rem', + }, + chartContainer: { + marginBottom: '2rem', + }, + chartTitle: { + fontSize: '1.1rem', + marginBottom: '1rem', + color: '#2c3e50', + }, + noData: { + padding: '2rem', + textAlign: 'center' as const, + color: '#7f8c8d', + }, +}; diff --git a/dashboard/next.config.js b/dashboard/next.config.js new file mode 100644 index 0000000..a843cbe --- /dev/null +++ b/dashboard/next.config.js @@ -0,0 +1,6 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = { + reactStrictMode: true, +} + +module.exports = nextConfig diff --git a/dashboard/package.json b/dashboard/package.json new file mode 100644 index 0000000..017653d --- /dev/null +++ b/dashboard/package.json @@ -0,0 +1,26 @@ +{ + "name": "openproxy-dashboard", + "version": "1.0.0", + "description": "Lightweight Next.js dashboard for OpenProxy metrics", + "private": true, + "scripts": { + "dev": "next dev -p 3008", + "build": "next build", + "start": "next start -p 3008", + "lint": "next lint" + }, + "dependencies": { + "next": "^14.2.0", + "react": "^18.3.0", + "react-dom": "^18.3.0", + "pg": "^8.16.3", + "recharts": "^2.12.0" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "@types/react": "^18.3.0", + "@types/react-dom": "^18.3.0", + "@types/pg": "^8.11.0", + "typescript": "^5.9.0" + } +} diff --git a/dashboard/tsconfig.json b/dashboard/tsconfig.json new file mode 100644 index 0000000..5ddf5a5 --- /dev/null +++ b/dashboard/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "paths": { + "@/*": ["./*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +}