Files
llmsecops-research/.github/workflows/archived/llmsecops-cicd.test.garak.yml
2025-08-21 12:50:09 -06:00

48 lines
1.4 KiB
YAML

# Deprecated smoke-test workflow: runs Garak's built-in test.Test probe
# against the test.Blank generator (no real LLM), then dumps and uploads
# the generated reports. Manual trigger only.
name: '[Deprecated] LLM Prompt Testing (Garak test.Test probe)'

on:
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      # Commit-pinned for supply-chain safety — NOTE(review): verify the
      # SHA still matches the intended actions/checkout release tag.
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # - name: 'set up git LFS'
      #   run: git lfs install
      - name: 'set up Python'
        uses: actions/setup-python@v3
        with:
          python-version: '3.12'
      # Retained for reference: local Phi-3 model setup, unused by the
      # test.Blank probe run below.
      # - name: 'set up Microsoft Phi-3 Mini 4k LLM from HuggingFace'
      #   run: |
      #     pip install huggingface-hub[cli]
      #     huggingface-cli download microsoft/Phi-3-mini-4k-instruct-onnx --include cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4/* --local-dir ${{ github.workspace }}/tests/llm
      #     pip install onnxruntime-genai
      - name: 'set up Garak'
        run: |
          pip install garak
      - name: 'Garak test probe'
        run: |
          python -m garak --model_type test.Blank --probes test.Test
      # Garak writes its reports under ~/.local/share/garak/garak_runs/
      # on the hosted runner; print them into the job log for quick review.
      - name: 'display report'
        run: |
          ls /home/runner/.local/share/garak/garak_runs/ -al
          echo
          cat /home/runner/.local/share/garak/garak_runs/garak.*.jsonl
          echo
          echo
          cat /home/runner/.local/share/garak/garak_runs/garak.*.html
      # Commit-pinned upload-artifact — NOTE(review): verify SHA against
      # the intended release tag.
      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
        with:
          name: 'garak_report'
          path: /home/runner/.local/share/garak/garak_runs/garak.*.html