diff --git a/.github/workflows/llmsecops-cicd.test.yml b/.github/workflows/llmsecops-cicd.test.yml
new file mode 100644
index 000000000..d6253d343
--- /dev/null
+++ b/.github/workflows/llmsecops-cicd.test.yml
@@ -0,0 +1,33 @@
+name: 'LLM Prompt Testing (Garak test.Test probe)'
+
+on:
+  workflow_dispatch:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+
+      # - name: 'set up git LFS'
+      #   run: git lfs install
+
+      - name: 'set up Python'
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      # - name: 'set up Microsoft Phi-3 Mini 4k LLM from HuggingFace'
+      #   run: |
+      #     pip install huggingface-hub[cli]
+      #     huggingface-cli download microsoft/Phi-3-mini-4k-instruct-onnx --include cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4/* --local-dir ${{ github.workspace }}/tests/llm
+      #     pip install onnxruntime-genai
+
+      - name: 'set up Garak'
+        run: |
+          pip install garak
+
+      - name: 'run Garak test.Test probe against test.Blank generator'
+        run: |
+          garak -v --model_type test.Blank --probes test.Test
diff --git a/.github/workflows/llmsecops-cicd.yml b/.github/workflows/llmsecops-cicd.yml
index a369b4b3f..105d5e422 100644
--- a/.github/workflows/llmsecops-cicd.yml
+++ b/.github/workflows/llmsecops-cicd.yml
@@ -39,6 +39,8 @@
           curl -X POST -i localhost:9999 -d '{ "prompt": "describe a random planet in our solar system in 10 words or less" }' || true
           echo
 
+          garak -v --model_type test.Blank --probes test.Test
+
           garak -v \
             --config ${{ github.workspace }}/tests/tools/garak.config.yml \
             --generator_option_file ${{ github.workspace }}/tests/tools/garak.rest.json \
diff --git a/tests/tools/garak.config.test.yml b/tests/tools/garak.config.test.yml
new file mode 100644
index 000000000..ae210483c
--- /dev/null
+++ b/tests/tools/garak.config.test.yml
@@ -0,0 +1,5 @@
+run:
+  generations: 1
+
+plugins:
+  probe_spec: promptinject.HijackHateHumansMini