mirror of https://github.com/lightbroker/llmsecops-research.git

Use bash script to set up workflow

.github/workflows/llmsecops-cicd.yml (vendored): 22 lines changed

@@ -10,27 +10,11 @@ jobs:
       - name: 'checkout'
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

-      - name: 'set up git LFS'
-        run: git lfs install
-
       - name: 'set up Python'
         uses: actions/setup-python@v3
         with:
           python-version: '3.12'

-      - name: 'set up Python dependencies'
-        run: |
-          pip install -r ${{ github.workspace }}/requirements.txt
-
-      - name: 'set up Microsoft Phi-3 Mini 4k LLM from HuggingFace'
-        id: setup_llm
-        run: |
-          pip install huggingface-hub[cli]
-          huggingface-cli download microsoft/Phi-3-mini-4k-instruct-onnx \
-            --include cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4/* \
-            --local-dir ${{ github.workspace }}/src/text_generation/adapters/llm
-        continue-on-error: false
-
       - name: 'set up Garak'
         run: |
           pip install garak
@@ -39,7 +23,7 @@ jobs:
       - name: 'start HTTP server'
         id: start_server
         run: |
-          nohup python -m src.api.server > server.log 2>&1 &
+          nohup ./run.sh > server.log 2>&1 &
           server_pid=$!
           echo "Server PID: $server_pid"
           echo "server_pid=$server_pid" >> $GITHUB_ENV
@@ -71,8 +55,8 @@ jobs:
         id: run_tests
         run: |
           # Test curl with detailed error reporting
-          # curl_output=$(curl -X POST -i localhost:9999/api/conversations -d '{ "prompt": "describe a random planet in our solar system in 10 words or less" }' --connect-timeout 10 -v 2>&1) || true
-          # echo "$curl_output"
+          curl_output=$(curl -X POST -i localhost:9999/api/conversations -d '{ "prompt": "describe a random planet in our solar system in 10 words or less" }' --connect-timeout 10 -v 2>&1) || true
+          echo "$curl_output"

           garak -v \
             --config ${{ github.workspace }}/src/tools/garak.config.yml \
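
Since the 'start HTTP server' step now backgrounds ./run.sh, which also installs dependencies before launching the service, the API may take longer to become reachable than it did when the server module was started directly. The workflow lines between the hunks shown here are not part of this diff, so it is unclear whether a readiness check already exists; a minimal sketch of one (assuming the localhost:9999/api/conversations endpoint used in the test step) could sit between the server and test steps:

    # hedged sketch, not part of the commit: wait until the API accepts connections
    for _ in $(seq 1 60); do
      if curl -s -o /dev/null --connect-timeout 2 localhost:9999/api/conversations; then
        echo "server is up"
        break
      fi
      sleep 5
    done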

run.sh: 27 lines changed

@@ -1,14 +1,33 @@
 #!/usr/bin/bash

-# create Python virtual environment
-python3.12 -m venv .env
-source .env/bin/activate
+# Local-only usage: ./script.sh --local
+
+# Parse command line arguments
+LOCAL=false
+
+while [[ $# -gt 0 ]]; do
+    case $1 in
+        --local)
+            LOCAL=true
+            shift
+            ;;
+        *)
+            echo "Unknown option: $1"
+            exit 1
+            ;;
+    esac
+done
+
+if [ "$LOCAL" = true ]; then
+    # create Python virtual environment
+    python3.12 -m venv .env
+    source .env/bin/activate
+fi

 # the ONNX model/data require git Large File System support
 git lfs install

 # install Python dependencies
 # pip install huggingface-hub[cli] langchain langchain_huggingface langchain_community optimum[onnxruntime] faiss-cpu
 pip install -r ./requirements.txt

 # environment variables
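
The run.sh hunk shown above is truncated after the '# environment variables' comment, so the remainder of the script is not visible in this diff. Given the commit message and the setup steps removed from the workflow, the script presumably goes on to fetch the model; a hypothetical continuation that reuses the huggingface-cli command from the removed workflow step (the ./src --local-dir path is an assumption) might look like:

    # hypothetical tail of run.sh, not shown in this diff:
    # download the Phi-3 Mini 4k ONNX model, as the removed workflow step did
    pip install "huggingface-hub[cli]"
    huggingface-cli download microsoft/Phi-3-mini-4k-instruct-onnx \
        --include "cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4/*" \
        --local-dir ./src/text_generation/adapters/llm

Locally, the whole setup would then be a single ./run.sh --local, which additionally creates and activates the virtual environment; in CI the workflow now calls ./run.sh without the flag.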