mirror of
https://github.com/FoggedLens/deflock.git
synced 2026-02-12 15:02:45 +00:00
backup directus
This commit is contained in:
36
scripts/directus-backup/README.md
Normal file
36
scripts/directus-backup/README.md
Normal file
@@ -0,0 +1,36 @@
|
||||
# Directus Backup Script
|
||||
|
||||
This script automates backups for a Directus instance, zipping key directories and uploading the archive to an S3 bucket. It is designed to be run as a cron job for regular, automated backups.
|
||||
|
||||
## Features
|
||||
- Zips the `database`, `extensions`, and `uploads` directories from your Directus instance
|
||||
- Uploads the backup archive to an S3 bucket with a timestamped filename
|
||||
- Assumes an AWS IAM role for secure S3 access
|
||||
- Cleans up temporary files after upload
|
||||
|
||||
## Prerequisites
|
||||
- Bash shell

- `zip` utility installed

- `jq` installed (used to parse the assume-role credentials)

- AWS CLI installed and configured

- An IAM identity permitted to assume the backup-writer role (which grants write access to the bucket)
|
||||
|
||||
## Setup
|
||||
1. **Clone or copy the script to your server.**
|
||||
2. **Create a `.env` file** in the same directory as the script with the following variables:
|
||||
```env
|
||||
ROLE_ARN=arn:aws:iam::123456789012:role/directus-backup-writer
|
||||
BUCKET_NAME=your-s3-bucket-name
|
||||
SOURCE_DIR="/path/to/your/directus-instance"
|
||||
```
|
||||
3. **Install dependencies:**
|
||||
- On Ubuntu/Debian: `sudo apt-get install zip awscli jq`
|
||||
- On CentOS/RHEL: `sudo yum install zip awscli jq`
|
||||
- On Alpine: `sudo apk add zip aws-cli jq`
|
||||
4. **Test the script manually:**
|
||||
```bash
|
||||
./directus-backup.sh
|
||||
```
|
||||
5. **Set up a cron job** to run the script automatically. For example, to run every day at 2am:
|
||||
```cron
|
||||
0 2 * * * /path/to/directus-backup.sh >> /var/log/directus-backup.log 2>&1
|
||||
```
|
||||
32
scripts/directus-backup/directus-backup.sh
Normal file
32
scripts/directus-backup/directus-backup.sh
Normal file
@@ -0,0 +1,32 @@
|
||||
#!/bin/bash
#
# Directus backup script.
# Zips the `database`, `extensions`, and `uploads` directories of a Directus
# instance and uploads the archive to S3 with a timestamped filename, using
# short-lived credentials obtained by assuming an IAM role.
#
# Required variables in a .env file next to this script:
#   ROLE_ARN    - IAM role to assume for the upload
#   BUCKET_NAME - destination S3 bucket
#   SOURCE_DIR  - path to the Directus instance

set -euo pipefail

# Load .env variables. `set -a` exports everything the file defines for the
# duration of the source; unlike `export $(grep ... | xargs)` this survives
# quoted values containing spaces (e.g. SOURCE_DIR="/path with spaces").
ENV_PATH="$(dirname "$0")/.env"
if [ -f "$ENV_PATH" ]; then
  set -a
  # shellcheck disable=SC1090
  . "$ENV_PATH"
  set +a
else
  echo ".env file not found at $ENV_PATH" >&2
  exit 1
fi

# Fail early with a clear message if any required variable is missing.
: "${ROLE_ARN:?ROLE_ARN must be set in .env}"
: "${BUCKET_NAME:?BUCKET_NAME must be set in .env}"
: "${SOURCE_DIR:?SOURCE_DIR must be set in .env}"

# CONFIGURATION
SESSION_NAME="directus-backup-session"
TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
ARCHIVE_NAME="directus-backup-${TIMESTAMP}.zip"
TMP_DIR="/tmp"
ARCHIVE_PATH="${TMP_DIR}/${ARCHIVE_NAME}"

# Always remove the temporary archive, even if a later step fails.
cleanup() { rm -f -- "$ARCHIVE_PATH"; }
trap cleanup EXIT

# Assume the backup-writer role and export the temporary credentials so the
# subsequent `aws s3 cp` runs as that role. (Previously ROLE_ARN and
# SESSION_NAME were defined but never used, so the role was never assumed.)
CREDS_JSON=$(aws sts assume-role \
  --role-arn "$ROLE_ARN" \
  --role-session-name "$SESSION_NAME" \
  --output json) || { echo "Failed to assume role $ROLE_ARN" >&2; exit 1; }
AWS_ACCESS_KEY_ID=$(printf '%s' "$CREDS_JSON" | jq -r '.Credentials.AccessKeyId')
AWS_SECRET_ACCESS_KEY=$(printf '%s' "$CREDS_JSON" | jq -r '.Credentials.SecretAccessKey')
AWS_SESSION_TOKEN=$(printf '%s' "$CREDS_JSON" | jq -r '.Credentials.SessionToken')
export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN

# Create the zip archive of the three key Directus directories.
cd "$SOURCE_DIR"
zip -r "$ARCHIVE_PATH" database extensions uploads

# Upload to S3 with the timestamped filename.
aws s3 cp "$ARCHIVE_PATH" "s3://${BUCKET_NAME}/${ARCHIVE_NAME}"

echo "Backup complete: ${ARCHIVE_NAME} uploaded to s3://${BUCKET_NAME}/"
|
||||
@@ -2,6 +2,11 @@ provider "aws" {
|
||||
region = "us-east-1"
|
||||
}
|
||||
|
||||
# Provisions the backup infrastructure (S3 bucket, KMS key, IAM role and
# policy) used by scripts/directus-backup/directus-backup.sh.
module "directus_backup" {
  source = "./modules/directus_backup"
  bucket_name = "deflock-directus-backups"
}
|
||||
|
||||
module "alpr_counts" {
|
||||
module_name = "alpr_counts"
|
||||
source = "./modules/alpr_counts"
|
||||
|
||||
105
terraform/modules/directus_backup/main.tf
Normal file
105
terraform/modules/directus_backup/main.tf
Normal file
@@ -0,0 +1,105 @@
|
||||
|
||||
# Destination bucket for Directus backup archives. Versioning, lifecycle
# expiry, and SSE-KMS encryption are configured as separate resources below.
resource "aws_s3_bucket" "directus_backup" {
  bucket = var.bucket_name
}
|
||||
|
||||
# Expire backups automatically so the bucket does not grow without bound:
# current objects are expired after 15 days (a delete marker is created,
# since versioning is enabled on this bucket), and noncurrent versions are
# permanently removed 15 days after being superseded.
resource "aws_s3_bucket_lifecycle_configuration" "directus_backup_lifecycle" {
  bucket = aws_s3_bucket.directus_backup.id

  rule {
    id     = "expire-old-backups"
    status = "Enabled"

    # AWS provider v4+ requires each rule to declare a filter (or prefix);
    # an empty filter applies the rule to every object in the bucket.
    filter {}

    expiration {
      days = 15
    }

    noncurrent_version_expiration {
      noncurrent_days = 15
    }
  }
}
|
||||
|
||||
# Keep prior versions of backup objects; noncurrent versions are cleaned up
# by the lifecycle rule on this bucket.
resource "aws_s3_bucket_versioning" "directus_backup_versioning" {
  bucket = aws_s3_bucket.directus_backup.id
  versioning_configuration {
    status = "Enabled"
  }
}
|
||||
|
||||
# Encrypt all backup objects at rest with the dedicated customer-managed
# KMS key (SSE-KMS). Writers therefore also need kms:GenerateDataKey* on
# the key — granted in the write policy below.
resource "aws_s3_bucket_server_side_encryption_configuration" "directus_backup_encryption" {
  bucket = aws_s3_bucket.directus_backup.id

  rule {
    apply_server_side_encryption_by_default {
      sse_algorithm = "aws:kms"
      kms_master_key_id = aws_kms_key.s3_backup_key.arn
    }
  }
}
|
||||
|
||||
# Customer-managed KMS key for the backup bucket's default encryption.
# Annual automatic rotation is enabled; deletion is delayed 10 days so an
# accidental destroy can be cancelled before data becomes unreadable.
resource "aws_kms_key" "s3_backup_key" {
  description = "KMS key for S3 backup bucket encryption"
  deletion_window_in_days = 10
  enable_key_rotation = true
}
|
||||
|
||||
# Role the backup script assumes to write archives to the bucket.
# NOTE(review): the trust policy only allows the EC2 service principal, so
# only EC2 instance profiles can assume this role. The README mentions an
# IAM user — if the script runs as a user (not on EC2), that principal must
# be added here; confirm where the cron job actually runs.
resource "aws_iam_role" "directus_backup_writer" {
  name = "directus-backup-writer"
  assume_role_policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Action = "sts:AssumeRole"
        Effect = "Allow"
        Principal = {
          Service = "ec2.amazonaws.com"
        }
      }
    ]
  })
}
|
||||
|
||||
# Permissions for the backup-writer role: object read/write/delete plus
# bucket listing on the backup bucket, and the KMS operations needed to
# write to a bucket whose default encryption is SSE-KMS.
resource "aws_iam_policy" "directus_backup_write_policy" {
  name = "directus-backup-write-policy"
  description = "Allow write access to S3 backup bucket"
  policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Effect = "Allow"
        Action = [
          "s3:PutObject",
          "s3:PutObjectAcl",
          "s3:GetObject",
          "s3:ListBucket",
          "s3:DeleteObject"
        ]
        # Both the bucket ARN (needed by s3:ListBucket) and the object ARN
        # pattern (needed by the object-level actions) are listed, so every
        # action above resolves against the resource type it requires.
        Resource = [
          "${aws_s3_bucket.directus_backup.arn}/*",
          "${aws_s3_bucket.directus_backup.arn}"
        ]
      },
      {
        Effect = "Allow"
        # Required because the bucket encrypts objects with the customer-
        # managed key: uploads call GenerateDataKey, downloads call Decrypt.
        Action = [
          "kms:Encrypt",
          "kms:Decrypt",
          "kms:GenerateDataKey*",
          "kms:DescribeKey"
        ]
        Resource = aws_kms_key.s3_backup_key.arn
      }
    ]
  })
}
|
||||
|
||||
# Attach the write policy to the backup-writer role.
resource "aws_iam_role_policy_attachment" "attach_write_policy" {
  role = aws_iam_role.directus_backup_writer.name
  policy_arn = aws_iam_policy.directus_backup_write_policy.arn
}
|
||||
|
||||
# Module input: bucket name is supplied by the caller (see the
# module "directus_backup" block in the root configuration).
variable "bucket_name" {
  description = "The name of the S3 bucket for Directus backups."
  type = string
}
|
||||
Reference in New Issue
Block a user