From f4a45332927155516e5f20c6e5966daaf9d7896b Mon Sep 17 00:00:00 2001 From: Pierre Therrode Date: Thu, 14 Aug 2025 15:13:40 +0200 Subject: [PATCH] Add OpenAI standard configuration and docker-compose-openai.yaml setup (#32) Co-authored-by: Pierre Therrode Co-authored-by: pierre --- .env.example | 8 +- README.md | 40 +++- docker-compose-openai.yaml | 376 +++++++++++++++++++++++++++++++++++++ 3 files changed, 418 insertions(+), 6 deletions(-) create mode 100644 docker-compose-openai.yaml diff --git a/.env.example b/.env.example index 2104ca0..4580139 100644 --- a/.env.example +++ b/.env.example @@ -1,7 +1,7 @@ # This an example of the .env file. You should create a .env file in the root directory of the project and fill in the values # with your own values. -# Used for cookie security. +# Used for cookie security. # You can generate one with python by running this python command `python -c 'import secrets; print(secrets.token_hex(16))'` SECRET_KEY=YOUR_SECRET_KEY @@ -14,3 +14,9 @@ AUTH_KEY=YOUR_AUTH_KEY AOAI_ENDPOINT=https://YOUR_AOAI_ENDPOINT AOAI_API_KEY=YOUR_AOAI_API_KEY AOAI_MODEL_NAME=gpt-4o + +# OpenAI Standard Configuration +# OPENAI_API_KEY=sk-your_openai_api_key_here +# OPENAI_ORG_ID= # Optional and can cause errors if mismatched +# OPENAI_TEXT_MODEL=gpt-3.5-turbo +# OPENAI_EMBEDDING_MODEL=text-embedding-ada-002 diff --git a/README.md b/README.md index 4dc2524..4fefc2b 100644 --- a/README.md +++ b/README.md @@ -31,21 +31,52 @@ These challenges are designed to teach security professionals to systematically - [Docker](https://docs.docker.com/get-docker/) installed - [Python 3.8+](https://www.python.org/downloads/) installed -- [Azure OpenAI Endpoint](https://azure.microsoft.com/en-us/products/ai-services/openai-service) endpoint with an api-key -- An Azure Foundry deployment named `text-embedding-ada-002` using the model `text-embedding-ada-002`, as well as the model you intend to use. 
Ex: `gpt-4o` +- **Option 1:** [Azure OpenAI Endpoint](https://azure.microsoft.com/en-us/products/ai-services/openai-service) endpoint with an api-key +- **Option 2:** [OpenAI API Key](https://platform.openai.com/api-keys) to use the standard OpenAI API +- For Azure OpenAI: An Azure Foundry deployment named `text-embedding-ada-002` using the model `text-embedding-ada-002`, as well as the model you intend to use. Ex: `gpt-4o` -### Environment Variables +### Configuration + +#### Option 1: Using Azure OpenAI (docker-compose.yaml) You can set the environment variables for the Azure OpenAI endpoint in the `.env` file. Please use the `.env.example` file as a template. +#### Option 2: Using Standard OpenAI API (docker-compose-openai.yaml) + +If you prefer to use the standard OpenAI API, you need to configure the following environment variables: + +```bash +export OPENAI_API_KEY="your-openai-api-key" +export OPENAI_TEXT_MODEL="gpt-4o" # or the model of your choice +export OPENAI_EMBEDDING_MODEL="text-embedding-ada-002" +export AUTH_KEY="your-auth-key" +export SECRET_KEY="your-secret-key" +``` + ### Running the Playground Labs +#### Option 1: With Azure OpenAI + The easiest way to run the playground labs is to use the [Docker Compose](https://docs.docker.com/compose/) file included in this repository. This will start all the components needed to run the playground environment with a set of 12 challenges. -``` +```bash docker-compose up ``` +#### Option 2: With Standard OpenAI API + +To use the standard OpenAI API instead of Azure OpenAI, use the `docker-compose-openai.yaml` file: + +```bash +docker compose -f docker-compose-openai.yaml up +``` + +### Accessing the Challenges + +Once the challenges are running you can access them using the following url: `http://localhost:5000/login?auth=[YOUR-AUTH-KEY]`. + +On macOS you will need to access `http://127.0.0.1:5000/login?auth=[YOUR-AUTH-KEY]` because localhost maps to IPv6 and the containers are listening on IPv4.
+ ### Changing the Challenges If you would like to change the challenges, you can do so by changing the `challenges/challenges.json` file. This file contains the description of the challenges and their objectives. You can then use the script `generate.py` to generate the new docker-compose file with the new challenges and their configuration. @@ -83,4 +114,3 @@ Originally, these challenges were deployed in Kubernetes in Azure. The Kubernete - [PyRIT Website](https://azure.github.io/PyRIT) - [Join the PyRIT Discord](https://discord.gg/wwRaYre8kR) - [Microsoft AI Red Team Overview](https://aka.ms/airedteam) - diff --git a/docker-compose-openai.yaml b/docker-compose-openai.yaml new file mode 100644 index 0000000..82069f2 --- /dev/null +++ b/docker-compose-openai.yaml @@ -0,0 +1,376 @@ +# OpenAI API configuration instead of Azure OpenAI +# Usage: docker compose -f docker-compose-openai.yaml up + +services: + challenge-home: + build: + context: . + dockerfile: docker/challenge-home/Dockerfile + ports: + - 127.0.0.1:5000:5000 + environment: + - AUTH_KEY=${AUTH_KEY} + - SECRET_KEY=${SECRET_KEY} + - OPEN_LINK_NEW_TAB=true + volumes: + - ./docker/data/challenge-home/data.json:/app/build/data.json:ro + + chat-copilot-1: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4001:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-1/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-2: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4002:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-2/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-3: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4003:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-3/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-4: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4004:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-4/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-5: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4005:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-5/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-6: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4006:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-6/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-7: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4007:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-7/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-8: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4008:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-8/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-9: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4009:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-9/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-10: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4010:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-10/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-11: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4011:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-11/appsettings.json:/app/appsettings.Production.json:ro + + chat-copilot-12: + build: + context: . 
+ dockerfile: docker/chat-copilot/webapi/Dockerfile + ports: + - 127.0.0.1:4012:4000 + environment: + # Config for standard OpenAI API + - KernelMemory__TextGeneratorType=OpenAI + - KernelMemory__Services__OpenAI__APIKey=${OPENAI_API_KEY} + - KernelMemory__Services__OpenAI__TextModel=${OPENAI_TEXT_MODEL} + - KernelMemory__Services__OpenAI__EmbeddingModel=${OPENAI_EMBEDDING_MODEL} + - KernelMemory__Services__OpenAI__MaxRetries=10 + - KernelMemory__DataIngestion__EmbeddingGeneratorTypes__0=OpenAI + - KernelMemory__Retrieval__EmbeddingGeneratorType=OpenAI + - KernelMemory__ImageOcrType=None + - ChatStore__Type=filesystem + - Challenge__MetapromptLeak=false + - Challenge__PluginsControl=false + - Challenge__Upload=false + - Challenge__AuthType=ChallengeHome + - Challenge__ChallengeHome__SecretKey=${SECRET_KEY} + - Planner__Model=${OPENAI_TEXT_MODEL} + - AllowedOrigins=http://localhost:5000 + - Kestrel__Endpoints__Http__Url=http://0.0.0.0:4000 + - PrometheusTelemetry__Endpoint=http://0.0.0.0:4001 + - ASPNETCORE_ENVIRONMENT=Production + volumes: + - ./docker/data/chat-copilot-12/appsettings.json:/app/appsettings.Production.json:ro