# Compose configuration that targets the standard OpenAI API instead of Azure OpenAI
# Usage: docker compose -f docker-compose-openai.yaml up
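#
# The services below read these variables from the shell environment or from a
# .env file next to this compose file (Docker Compose loads .env automatically
# for variable substitution). A minimal .env sketch; the model names are only
# illustrative examples, not values mandated by this repository:
#
#   AUTH_KEY=<auth key for the challenge-home UI>
#   SECRET_KEY=<shared secret between challenge-home and the chat-copilot services>
#   OPENAI_API_KEY=sk-...
#   OPENAI_TEXT_MODEL=gpt-4o                        # example chat/completions model
#   OPENAI_EMBEDDING_MODEL=text-embedding-3-small   # example embedding model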
services:
  challenge-home:
    build:
      context: .
      dockerfile: docker/challenge-home/Dockerfile
    ports:
      - 127.0.0.1:5000:5000
    environment:
      - AUTH_KEY=${AUTH_KEY}
      - SECRET_KEY=${SECRET_KEY}
      - OPEN_LINK_NEW_TAB=true
    volumes:
      - ./docker/data/challenge-home/data.json:/app/build/data.json:ro
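  # The twelve chat-copilot-N services below are identical except for the host
  # port (127.0.0.1:400N) and the per-challenge appsettings.json mount. Inside
  # each container, Kestrel serves the API on port 4000 (the only published
  # port) and the PrometheusTelemetry endpoint is configured on port 4001.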
  chat-copilot-1:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4001:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-1/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-2:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4002:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-2/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-3:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4003:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-3/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-4:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4004:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-4/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-5:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4005:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-5/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-6:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4006:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-6/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-7:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4007:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-7/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-8:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4008:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-8/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-9:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4009:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-9/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-10:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4010:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-10/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-11:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4011:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-11/appsettings.json:/app/appsettings.Production.json:ro
  chat-copilot-12:
    build:
      context: .
      dockerfile: docker/chat-copilot/webapi/Dockerfile
    ports:
      - 127.0.0.1:4012:4000
    environment:
      # Config for standard OpenAI API
      - KernelMemory__TextGeneratorType=OpenAI
      - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY}
      - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL}
      - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL}
      - KernelMemory__Services__OpenAI__MaxRetries=10
      - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI
      - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI
      - KernelMemory__ImageOcrType=None
      - ChatStore__Type=filesystem
      - Challenge__MetapromptLeak=false
      - Challenge__PluginsControl=false
      - Challenge__Upload=false
      - Challenge__AuthType=ChallengeHome
      - Challenge__ChallengeHome__SecretKey=${SECRET_KEY}
      - Planner__Model=${OPENAI_TEXT_MODEL}
      - AllowedOrigins=http://localhost:5000
      - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000
      - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001
      - ASPNETCORE_ENVIRONMENT=Production
    volumes:
      - ./docker/data/chat-copilot-12/appsettings.json:/app/appsettings.Production.json:ro