From 289ea3aedf5c5d3b91dc993a1a061cb37a84d152 Mon Sep 17 00:00:00 2001 From: Garry Tan Date: Thu, 26 Mar 2026 21:59:48 -0600 Subject: [PATCH] feat: design binary variants + iterate commands variants: generates N style variations with staggered parallel (1.5s between launches, exponential backoff on 429). 7 built-in style variations (bold, calm, warm, corporate, dark, playful + default). Tested: 3/3 variants in 41.6s. iterate: multi-turn design iteration using previous_response_id for conversational threading. Falls back to re-generation with accumulated feedback if threading doesn't retain visual context. Co-Authored-By: Claude Opus 4.6 (1M context) --- design/src/cli.ts | 21 ++++- design/src/iterate.ts | 179 +++++++++++++++++++++++++++++++++++++++++ design/src/variants.ts | 173 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 372 insertions(+), 1 deletion(-) create mode 100644 design/src/iterate.ts create mode 100644 design/src/variants.ts diff --git a/design/src/cli.ts b/design/src/cli.ts index ba563fc2..0c491941 100644 --- a/design/src/cli.ts +++ b/design/src/cli.ts @@ -16,6 +16,8 @@ import { COMMANDS } from "./commands"; import { generate } from "./generate"; import { checkCommand } from "./check"; import { compare } from "./compare"; +import { variants } from "./variants"; +import { iterate } from "./iterate"; import { resolveApiKey, saveApiKey } from "./auth"; function parseArgs(argv: string[]): { command: string; flags: Record } { @@ -140,11 +142,28 @@ async function main(): Promise { break; case "variants": + await variants({ + brief: flags.brief as string, + briefFile: flags["brief-file"] as string, + count: flags.count ? 
parseInt(flags.count as string) : 3, + outputDir: (flags["output-dir"] as string) || "/tmp/gstack-variants/", + size: flags.size as string, + quality: flags.quality as string, + }); + break; + case "iterate": + await iterate({ + session: flags.session as string, + feedback: flags.feedback as string, + output: (flags.output as string) || "/tmp/gstack-iterate.png", + }); + break; + case "diff": case "evolve": case "verify": - console.error(`Command '${command}' will be implemented in Commit 2+.`); + console.error(`Command '${command}' will be implemented in Commit 7+.`); process.exit(1); break; } diff --git a/design/src/iterate.ts b/design/src/iterate.ts new file mode 100644 index 00000000..25fdbfa8 --- /dev/null +++ b/design/src/iterate.ts @@ -0,0 +1,179 @@ +/** + * Multi-turn design iteration using OpenAI Responses API. + * + * Primary: uses previous_response_id for conversational threading. + * Fallback: if threading doesn't retain visual context, re-generates + * with original brief + accumulated feedback in a single prompt. + */ + +import fs from "fs"; +import path from "path"; +import { requireApiKey } from "./auth"; +import { readSession, updateSession } from "./session"; + +export interface IterateOptions { + session: string; // Path to session JSON file + feedback: string; // User feedback text + output: string; // Output path for new PNG +} + +/** + * Iterate on an existing design using session state. 
+ */ +export async function iterate(options: IterateOptions): Promise { + const apiKey = requireApiKey(); + const session = readSession(options.session); + + console.error(`Iterating on session ${session.id}...`); + console.error(` Previous iterations: ${session.feedbackHistory.length}`); + console.error(` Feedback: "${options.feedback}"`); + + const startTime = Date.now(); + + // Try multi-turn with previous_response_id first + let success = false; + let responseId = ""; + + try { + const result = await callWithThreading(apiKey, session.lastResponseId, options.feedback); + responseId = result.responseId; + + fs.mkdirSync(path.dirname(options.output), { recursive: true }); + fs.writeFileSync(options.output, Buffer.from(result.imageData, "base64")); + success = true; + } catch (err: any) { + console.error(` Threading failed: ${err.message}`); + console.error(" Falling back to re-generation with accumulated feedback..."); + + // Fallback: re-generate with original brief + all feedback + const accumulatedPrompt = buildAccumulatedPrompt( + session.originalBrief, + [...session.feedbackHistory, options.feedback] + ); + + const result = await callFresh(apiKey, accumulatedPrompt); + responseId = result.responseId; + + fs.mkdirSync(path.dirname(options.output), { recursive: true }); + fs.writeFileSync(options.output, Buffer.from(result.imageData, "base64")); + success = true; + } + + if (success) { + const elapsed = ((Date.now() - startTime) / 1000).toFixed(1); + const size = fs.statSync(options.output).size; + console.error(`Generated (${elapsed}s, ${(size / 1024).toFixed(0)}KB) → ${options.output}`); + + // Update session + updateSession(session, responseId, options.feedback, options.output); + + console.log(JSON.stringify({ + outputPath: options.output, + sessionFile: options.session, + responseId, + iteration: session.feedbackHistory.length + 1, + }, null, 2)); + } +} + +async function callWithThreading( + apiKey: string, + previousResponseId: string, + feedback: 
string, +): Promise<{ responseId: string; imageData: string }> { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 120_000); + + try { + const response = await fetch("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + "Authorization": `Bearer ${apiKey}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + model: "gpt-4o", + input: `Based on the previous design, make these changes: ${feedback}`, + previous_response_id: previousResponseId, + tools: [{ type: "image_generation", size: "1536x1024", quality: "high" }], + }), + signal: controller.signal, + }); + + if (!response.ok) { + const error = await response.text(); + throw new Error(`API error (${response.status}): ${error.slice(0, 300)}`); + } + + const data = await response.json() as any; + const imageItem = data.output?.find((item: any) => item.type === "image_generation_call"); + + if (!imageItem?.result) { + throw new Error("No image data in threaded response"); + } + + return { responseId: data.id, imageData: imageItem.result }; + } finally { + clearTimeout(timeout); + } +} + +async function callFresh( + apiKey: string, + prompt: string, +): Promise<{ responseId: string; imageData: string }> { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 120_000); + + try { + const response = await fetch("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + "Authorization": `Bearer ${apiKey}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + model: "gpt-4o", + input: prompt, + tools: [{ type: "image_generation", size: "1536x1024", quality: "high" }], + }), + signal: controller.signal, + }); + + if (!response.ok) { + const error = await response.text(); + throw new Error(`API error (${response.status}): ${error.slice(0, 300)}`); + } + + const data = await response.json() as any; + const imageItem = data.output?.find((item: any) => item.type === 
"image_generation_call"); + + if (!imageItem?.result) { + throw new Error("No image data in fresh response"); + } + + return { responseId: data.id, imageData: imageItem.result }; + } finally { + clearTimeout(timeout); + } +} + +function buildAccumulatedPrompt(originalBrief: string, feedback: string[]): string { + const lines = [ + originalBrief, + "", + "Previous feedback (apply all of these changes):", + ]; + + feedback.forEach((f, i) => { + lines.push(`${i + 1}. ${f}`); + }); + + lines.push( + "", + "Generate a new mockup incorporating ALL the feedback above.", + "The result should look like a real production UI, not a wireframe." + ); + + return lines.join("\n"); +} diff --git a/design/src/variants.ts b/design/src/variants.ts new file mode 100644 index 00000000..017fe564 --- /dev/null +++ b/design/src/variants.ts @@ -0,0 +1,173 @@ +/** + * Generate N design variants from a brief. + * Uses staggered parallel: 1.5s delay between API calls to avoid rate limits. + * Falls back to exponential backoff on 429s. + */ + +import fs from "fs"; +import path from "path"; +import { requireApiKey } from "./auth"; +import { parseBrief } from "./brief"; + +export interface VariantsOptions { + brief?: string; + briefFile?: string; + count: number; + outputDir: string; + size?: string; + quality?: string; +} + +const STYLE_VARIATIONS = [ + "", // First variant uses the brief as-is + "Use a bolder, more dramatic visual style with stronger contrast and larger typography.", + "Use a calmer, more minimal style with generous whitespace and subtle colors.", + "Use a warmer, more approachable style with rounded corners and friendly typography.", + "Use a more professional, corporate style with sharp edges and structured grid layout.", + "Use a dark theme with light text and accent colors for key interactive elements.", + "Use a playful, modern style with asymmetric layout and unexpected color accents.", +]; + +/** + * Generate a single variant with retry on 429. 
+ */ +async function generateVariant( + apiKey: string, + prompt: string, + outputPath: string, + size: string, + quality: string, +): Promise<{ path: string; success: boolean; error?: string }> { + const maxRetries = 3; + let lastError = ""; + + for (let attempt = 0; attempt <= maxRetries; attempt++) { + if (attempt > 0) { + // Exponential backoff: 2s, 4s, 8s + const delay = Math.pow(2, attempt) * 1000; + console.error(` Rate limited, retrying in ${delay / 1000}s...`); + await new Promise(r => setTimeout(r, delay)); + } + + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 120_000); + + try { + const response = await fetch("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + "Authorization": `Bearer ${apiKey}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + model: "gpt-4o", + input: prompt, + tools: [{ type: "image_generation", size, quality }], + }), + signal: controller.signal, + }); + + clearTimeout(timeout); + + if (response.status === 429) { + lastError = "Rate limited (429)"; + continue; + } + + if (!response.ok) { + const error = await response.text(); + return { path: outputPath, success: false, error: `API error (${response.status}): ${error.slice(0, 200)}` }; + } + + const data = await response.json() as any; + const imageItem = data.output?.find((item: any) => item.type === "image_generation_call"); + + if (!imageItem?.result) { + return { path: outputPath, success: false, error: "No image data in response" }; + } + + fs.writeFileSync(outputPath, Buffer.from(imageItem.result, "base64")); + return { path: outputPath, success: true }; + } catch (err: any) { + clearTimeout(timeout); + if (err.name === "AbortError") { + return { path: outputPath, success: false, error: "Timeout (120s)" }; + } + lastError = err.message; + } + } + + return { path: outputPath, success: false, error: lastError }; +} + +/** + * Generate N variants with staggered parallel execution. 
+ */ +export async function variants(options: VariantsOptions): Promise { + const apiKey = requireApiKey(); + const baseBrief = options.briefFile + ? parseBrief(options.briefFile, true) + : parseBrief(options.brief!, false); + + const count = Math.min(options.count, 7); // Cap at 7 style variations + const size = options.size || "1536x1024"; + const quality = options.quality || "high"; + + fs.mkdirSync(options.outputDir, { recursive: true }); + + console.error(`Generating ${count} variants...`); + const startTime = Date.now(); + + // Staggered parallel: start each call 1.5s apart + const promises: Promise<{ path: string; success: boolean; error?: string }>[] = []; + + for (let i = 0; i < count; i++) { + const variation = STYLE_VARIATIONS[i] || ""; + const prompt = variation + ? `${baseBrief}\n\nStyle direction: ${variation}` + : baseBrief; + + const outputPath = path.join(options.outputDir, `variant-${String.fromCharCode(65 + i)}.png`); + + // Stagger: wait 1.5s between launches + const delay = i * 1500; + promises.push( + new Promise(resolve => setTimeout(resolve, delay)) + .then(() => { + console.error(` Starting variant ${String.fromCharCode(65 + i)}...`); + return generateVariant(apiKey, prompt, outputPath, size, quality); + }) + ); + } + + const results = await Promise.allSettled(promises); + const elapsed = ((Date.now() - startTime) / 1000).toFixed(1); + + const succeeded: string[] = []; + const failed: string[] = []; + + for (const result of results) { + if (result.status === "fulfilled" && result.value.success) { + const size = fs.statSync(result.value.path).size; + console.error(` ✓ ${path.basename(result.value.path)} (${(size / 1024).toFixed(0)}KB)`); + succeeded.push(result.value.path); + } else { + const error = result.status === "fulfilled" ? result.value.error : (result.reason as Error).message; + const filePath = result.status === "fulfilled" ? 
result.value.path : "unknown"; + console.error(` ✗ ${path.basename(filePath)}: ${error}`); + failed.push(path.basename(filePath)); + } + } + + console.error(`\n${succeeded.length}/${count} variants generated (${elapsed}s)`); + + // Output structured result to stdout + console.log(JSON.stringify({ + outputDir: options.outputDir, + count, + succeeded: succeeded.length, + failed: failed.length, + paths: succeeded, + errors: failed, + }, null, 2)); +}