commit 11cf01f08edb1d342016e14092d07dbd201e9792 Author: Ronni Skansing Date: Thu Aug 21 16:14:09 2025 +0200 Initial open source release diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..1b7ba3e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,101 @@ +name: Release Build and Upload + +on: + push: + tags: + - "v*.*.*" + +jobs: + build-and-release: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Set up Docker + uses: docker/setup-buildx-action@v3 + + - name: Extract version from tag + id: get_version + run: | + echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT + echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT + echo "HASH=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Build frontend files + working-directory: frontend + run: | + sudo docker run --rm \ + -v "$(pwd)":/app \ + -w /app \ + node:alpine \ + sh -c "npm ci && npm run build-production" + + - name: Move frontend build to backend + run: | + rm -rf backend/frontend/build + mkdir -p backend/frontend/build + cp -r frontend/build/* backend/frontend/build/ + + - name: Build single binary with all features + run: | + sudo docker run --rm \ + -v "$(pwd)":/app \ + -w /app/backend \ + golang:alpine \ + go build -trimpath \ + -ldflags="-X github.com/phishingclub/phishingclub/version.hash=ph${{ steps.get_version.outputs.HASH }} -X github.com/phishingclub/phishingclub/version.version=${{ steps.get_version.outputs.VERSION }}" \ + -tags production -o ../build/phishingclub main.go + + - name: Fix build directory permissions + run: | + sudo chown -R $USER:$USER build/ + chmod 755 build/ + ls -la build/ + + - name: Sign binary with Ed25519 + run: | + # Create directory for keys + mkdir -p /tmp/keys + chmod 700 /tmp/keys + + # Save both private keys from GitHub secrets + echo "${{ secrets.SIGNKEY_1 }}" > /tmp/keys/private1.pem + echo "${{ secrets.SIGNKEY_2 }}" > /tmp/keys/private2.pem + chmod 600 /tmp/keys/private1.pem + chmod 600 /tmp/keys/private2.pem + + # Sign binary with primary key (Key 1) + openssl pkeyutl -sign -inkey /tmp/keys/private1.pem \ + -rawin -in build/phishingclub \ + -out build/phishingclub.sig + + # Clean up keys + rm -rf /tmp/keys + + - name: Create compressed package with signature + run: | + mkdir -p packages + + # Package binary with signature + tar -czf packages/phishingclub_${{ steps.get_version.outputs.VERSION }}.tar.gz \ + -C build \ + phishingclub \ + phishingclub.sig + + - name: Create GitHub Release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh release create ${{ steps.get_version.outputs.TAG }} \ + ./packages/phishingclub_${{ steps.get_version.outputs.VERSION }}.tar.gz \ + --title "PhishingClub ${{ steps.get_version.outputs.TAG }}" \ + --notes "PhishingClub release ${{ steps.get_version.outputs.TAG }}" + + - name: Notify about release + run: | + curl -d "phishingclub version ${{ steps.get_version.outputs.VERSION }} has been released on GitHub" https://ntfy.sh/phishing_club_released diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml new file mode 100644 index 0000000..e88ef52 --- /dev/null +++ b/.github/workflows/test-build.yml @@ -0,0 +1,125 @@ +name: Test Build + +on: + #pull_request: + # branches: [ main, develop ] + push: + branches: [test-build] + +jobs: + test-build: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + 
fetch-depth: 1 + + - name: Set up Docker + uses: docker/setup-buildx-action@v3 + + - name: Extract version info + id: get_version + run: | + echo "VERSION=test-$(date +%Y%m%d-%H%M%S)" >> $GITHUB_OUTPUT + echo "HASH=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Build frontend files + working-directory: frontend + run: | + sudo docker run --rm \ + -v "$(pwd)":/app \ + -w /app \ + node:alpine \ + sh -c "npm ci && npm run build-production" + + - name: Move frontend build to backend + run: | + rm -rf backend/frontend/build + mkdir -p backend/frontend/build + cp -r frontend/build/* backend/frontend/build/ + + - name: Build single binary with all features + run: | + sudo docker run --rm \ + -v "$(pwd)":/app \ + -w /app/backend \ + golang:alpine \ + go build -trimpath \ + -ldflags="-X github.com/phishingclub/phishingclub/version.hash=ph${{ steps.get_version.outputs.HASH }} -X github.com/phishingclub/phishingclub/version.version=${{ steps.get_version.outputs.VERSION }}" \ + -tags production -o ../build/phishingclub main.go + + - name: Fix build directory permissions + run: | + sudo chown -R $USER:$USER build/ + chmod 755 build/ + ls -la build/ + + - name: Test binary signing (if keys available) + run: | + if [ -n "${{ secrets.SIGNKEY_1 }}" ]; then + echo "Testing binary signing..." + + # Create directory for keys + mkdir -p /tmp/keys + chmod 700 /tmp/keys + + # Save private key from GitHub secrets + echo "${{ secrets.SIGNKEY_1 }}" > /tmp/keys/private1.pem + chmod 600 /tmp/keys/private1.pem + + # Sign binary with primary key + openssl pkeyutl -sign -inkey /tmp/keys/private1.pem \ + -rawin -in build/phishingclub \ + -out build/phishingclub.sig + + # Clean up keys + rm -rf /tmp/keys + + echo "✅ Binary signing test successful" + else + echo "⚠️ SIGNKEY_1 not available - skipping signing test" + fi + + - name: Test package creation + run: | + mkdir -p packages + + # Test packaging + if [ -f build/phishingclub.sig ]; then + tar -czf packages/phishingclub_${{ steps.get_version.outputs.VERSION }}.tar.gz \ + -C build \ + phishingclub \ + phishingclub.sig + echo "✅ Package created with signature" + else + tar -czf packages/phishingclub_${{ steps.get_version.outputs.VERSION }}.tar.gz \ + -C build \ + phishingclub + echo "✅ Package created without signature" + fi + + - name: Verify build artifacts + run: | + echo "=== Build Summary ===" + echo "Binary size: $(du -h build/phishingclub | cut -f1)" + echo "Binary info:" + file build/phishingclub + + if [ -f build/phishingclub.sig ]; then + echo "Signature size: $(du -h build/phishingclub.sig | cut -f1)" + fi + + echo "Package size: $(du -h packages/phishingclub_${{ steps.get_version.outputs.VERSION }}.tar.gz | cut -f1)" + echo "Package contents:" + tar -tzf packages/phishingclub_${{ steps.get_version.outputs.VERSION }}.tar.gz + + - name: Upload build artifacts (for review) + uses: actions/upload-artifact@v4 + with: + name: phishingclub-test-build-${{ steps.get_version.outputs.HASH }} + path: | + build/phishingclub + build/phishingclub.sig + packages/phishingclub_${{ steps.get_version.outputs.VERSION }}.tar.gz + retention-days: 2 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..796b96d --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +/build diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..87a0892 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Delve remote", + "type": "go", + "request": "attach", + "mode": "remote", + "substitutePath": [ + { + "from": "${workspaceFolder}/", + "to": "/app/", + }, + ], + "port": 2345, + "host": "127.0.0.1", + "showLog": true, + "apiVersion": 2, + "trace": "verbose" + } + ] +} \ No newline at end of file diff --git a/CLA.md b/CLA.md new file mode 100644 index 0000000..10721c1 --- /dev/null +++ b/CLA.md @@ -0,0 +1,127 @@ +# Contributor License Agreement (CLA) + +Thank you for your interest in contributing to Phishing Club ("we" or "us"). + +This Contributor License Agreement ("Agreement") documents the rights granted by contributors to us. To make this document effective, please read it carefully and indicate your agreement by signing off on your commits as described below. + +## 1. Definitions + +**"You"** (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with us. + +**"Contribution"** shall mean any original work of authorship, including any modifications or additions to an existing work, that is intentionally submitted by You to us for inclusion in, or documentation of, any of our products. + +**"Submit"** means any form of electronic, verbal, or written communication sent to us or our representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems. + +## 2. Grant of Copyright License + +Subject to the terms and conditions of this Agreement, You hereby grant to us and to recipients of software distributed by us a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to: + +- Use, reproduce, modify, display, perform, sublicense, and distribute Your Contributions +- License Your Contributions under the GNU Affero General Public License v3.0 (AGPL-3.0) +- License Your Contributions under commercial licenses that allow redistribution without AGPL restrictions + +## 3. Grant of Patent License + +Subject to the terms and conditions of this Agreement, You hereby grant to us and to recipients of software distributed by us a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer Your Contributions. + +## 4. Dual Licensing + +You understand and agree that: + +- Your Contributions will be available under the AGPL-3.0 license +- We may also license Your Contributions under commercial licenses +- This dual licensing model allows us to offer commercial licenses to users who prefer not to comply with AGPL-3.0 requirements +- Revenue from commercial licenses helps support the continued development of this open source project + +## 5. Representations + +You represent that: + +1. **Legal Right**: You have the legal right to grant the above licenses +2. **Original Work**: Each of Your Contributions is Your original creation (or You have sufficient rights to grant the licenses for any third-party material included) +3. **No Violations**: Your Contributions do not violate any third-party rights, including intellectual property rights +4. **Accuracy**: The information You provide in this Agreement is accurate + +## 6. 
Disclaimer + +You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. + +## 7. How to Sign + +To indicate your acceptance of this CLA, include the following line in all your commit messages: + +``` +Signed-off-by: Your Full Name +``` + +You can add this automatically by using the `-s` flag when committing: + +```bash +git commit -s -m "Your commit message" +``` + +By including this sign-off, you certify that: +- You have read and agree to this CLA +- Your contribution complies with the Developer Certificate of Origin (DCO) +- You have the right to submit the contribution under these terms + +## 8. Developer Certificate of Origin (DCO) + +By signing off on your commits, you also agree to the Developer Certificate of Origin v1.1: + +``` +Developer Certificate of Origin +Version 1.1 + +Copyright (C) 2004, 2006 The Linux Foundation and its contributors. + +Everyone is permitted to copy and distribute verbatim copies of this +license document, but changing it is not allowed. + +Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +(a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +(b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +(c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +(d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. +``` + +## 9. Corporate Contributors + +If You are contributing on behalf of a corporation or other legal entity, the entity must also agree to this CLA. Please have an authorized representative of the entity sign a Corporate CLA by contacting us at legal@phishing.club. + +## 10. Contact + +For questions about this CLA, please contact: +- Email: help@phishing.club +- Create a GitHub issue for clarifications +- Join our Discord: https://discord.gg/Zssps7U8gX + +## 11. Changes to this Agreement + +We may update this CLA from time to time. We will notify contributors of any significant changes through our usual communication channels (GitHub and Discord). + +--- + +**Thank you for contributing to Phishing Club!** + +This CLA helps ensure that the project can continue to be developed and distributed under both open source and commercial licenses, benefiting the entire community while supporting the project's sustainability. diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md new file mode 100644 index 0000000..24b5b7f --- /dev/null +++ b/CONTRIBUTORS.md @@ -0,0 +1,80 @@ +# Contributors + +This file acknowledges the individuals and organizations who have contributed to Phishing Club. 
+ +## How to Contribute + +We welcome contributions from the community! Before contributing, please: + +1. Read our [Contributing Guidelines](README.md#contributing) +2. Sign our Contributor License Agreement (see below) +3. Follow our development workflow and coding standards + +## Contributor License Agreement (CLA) + +**Important**: By contributing to Phishing Club, you agree to the following terms: + +### Grant of Rights +You hereby grant to Phishing Club and to recipients of software distributed by Phishing Club a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable license to: + +- Use, reproduce, modify, display, perform, sublicense, and distribute your contributions +- License your contributions under both the AGPL-3.0 license and commercial licenses + +### Representations +You represent that: +- You have the legal right to grant the above licenses +- Your contributions are your original creation or you have sufficient rights to grant the licenses +- Your contributions do not violate any third-party rights +- You understand and agree that your contributions may be licensed under both open source and commercial terms + +### Developer Certificate of Origin (DCO) +All commits must include a "Signed-off-by" line to indicate agreement with the [Developer Certificate of Origin](https://developercertificate.org/): + +``` +git commit -s -m "Your commit message" +``` + +This adds a line like: +``` +Signed-off-by: Your Name +``` + +By adding this line, you certify that you have the right to contribute the code and agree to our CLA terms. + +## Recognition + +Contributors will be recognized in the following ways: +- Listed in this file (with permission) +- GitHub contributor statistics + +## Types of Contributions +Phishing Club has a lot of room for improvement, both in maintenance and in new features that could be implemented. Much of the code can be refactored and improved in various ways. + +Join our Discord and we can help you if you want to take on a specific project. + +Be mindful that all contributions should strive to be secure and work with the +live update system. + +We appreciate all forms of contribution: +- 🐛 Bug reports and fixes +- ✨ New features and enhancements +- 📖 Documentation improvements +- 🧪 Test coverage improvements +- 🎨 UI/UX improvements +- 🔒 Security improvements +- 🌍 Translations and internationalization +- 💡 Ideas and feature suggestions + +## Contact + +For questions about contributing or the CLA: +- Create a GitHub issue +- Join our [Discord community](https://discord.gg/Zssps7U8gX) +- Email: contribute@phishing.club + +--- + +*Thank you to all contributors who help make Phishing Club better!* 🙏 + +# CONTRIBUTORS +**contributors add yourself here** diff --git a/COPYRIGHT b/COPYRIGHT new file mode 100644 index 0000000..3802c38 --- /dev/null +++ b/COPYRIGHT @@ -0,0 +1,55 @@ +Copyright (C) 2025-present Phishing Club + +This file is part of Phishing Club. + +Phishing Club is free software: you can redistribute it and/or modify +it under the terms of the GNU Affero General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +Phishing Club is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Affero General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with Phishing Club. 
If not, see . + +Additional permissions under GNU AGPL version 3 section 7: + +If you modify this Program, or any covered work, by linking or combining +it with other code, such other code is not for that reason alone subject +to any of the requirements of the GNU AGPL version 3. + +For commercial licensing that allows you to distribute Phishing Club +or derivative works without the restrictions of the AGPL, please contact +license@phishing.club. + +================================================================================ + +DUAL LICENSING + +Phishing Club is available under a dual licensing model: + +1. Open Source License (AGPL-3.0) + - Free for open source projects + - Free for educational and research use + - Free for internal security testing + - Requires source code disclosure for network services + +2. Commercial License + - Removes AGPL restrictions + - Allows proprietary use and distribution + - Enables SaaS offerings without source disclosure + - Includes commercial support options + +For commercial licensing inquiries, contact: license@phishing.club + +================================================================================ + +CONTACT + +Website: https://phishing.club +Email: license@phishing.club +Security: security@phishing.club +Discord: https://discord.gg/Zssps7U8gX diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..04d6565 --- /dev/null +++ b/LICENSE @@ -0,0 +1,681 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2025-present Phishing Club + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +================================================================================ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. 
+ + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. 
If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. 
+ + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000..18a2412 --- /dev/null +++ b/NOTICE @@ -0,0 +1,67 @@ +Phishing Club +Copyright (C) 2025-present Phishing Club + +This product includes software developed by the Phishing Club project. + +================================================================================ + +DUAL LICENSE NOTICE +================================================================================ + +This software is available under a dual licensing model: + +1. 
GNU Affero General Public License v3.0 (AGPL-3.0) + - For open source use, educational purposes, and non-commercial applications + - Full license text available in the LICENSE file + - Source code must be made available when distributed or used as a network service + +2. Commercial License + - For commercial use without AGPL restrictions + - For proprietary integrations and SaaS offerings + - Contact license@phishing.club for commercial licensing terms + +================================================================================ + +THIRD PARTY COMPONENTS +================================================================================ + +This software may include third-party components with their own licensing terms. +Please refer to the respective component documentation and license files for +complete licensing information. + +================================================================================ + +CONTRIBUTOR LICENSE AGREEMENT +================================================================================ + +By contributing to this project, contributors agree that their contributions +will be licensed under the same dual license terms (AGPL-3.0 and commercial). +Contributors confirm they have the right to contribute the code and grant +the project maintainers the right to license contributions under both licenses. + +================================================================================ + +CONTACT INFORMATION +================================================================================ + +For licensing inquiries: license@phishing.club +For security issues: security@phishing.club +Project website: https://phishing.club +Community Discord: https://discord.gg/Zssps7U8gX + +================================================================================ + +ETHICAL USE DISCLAIMER +================================================================================ + +This software is designed for authorized security testing, penetration testing, +and security awareness training purposes only. Users are responsible for: + +- Obtaining proper authorization before conducting any phishing simulations +- Complying with all applicable laws and regulations +- Using the software ethically and responsibly +- Protecting any data collected during authorized testing + +Misuse of this software may violate applicable laws. Users are solely +responsible for ensuring their use complies with all applicable laws +and regulations. diff --git a/README.md b/README.md new file mode 100644 index 0000000..dcdbf16 --- /dev/null +++ b/README.md @@ -0,0 +1,263 @@ +# Phishing Club + +[![Latest Release](https://img.shields.io/github/v/release/phishingclub/phishingclub)](https://github.com/phishingclub/phishingclub/releases/latest) +[![Downloads](https://img.shields.io/github/downloads/phishingclub/phishingclub/total)](https://github.com/phishingclub/phishingclub/releases) +[![Discord](https://img.shields.io/badge/Discord-Join%20Server-7289da?style=flat&logo=discord&logoColor=white)](https://discord.gg/Zssps7U8gX) +[![License: AGPL v3](https://img.shields.io/badge/License-AGPL%20v3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0) + + + +The self-hosted phishing framework for security awareness training and penetration testing. + +## Overview + +Phishing Club is a phishing simulation framework designed for security professionals, red teams, and organizations looking to test and improve their security awareness. 
This platform provides tools for creating, deploying, and managing phishing campaigns in a controlled environment. + +## License + +Phishing Club is available under a dual licensing model: + +### Open Source License (AGPL-3.0) +This project is licensed under the GNU Affero General Public License v3.0 (AGPL-3.0). This means: +- ✅ You can use, modify, and distribute the software freely +- ✅ Perfect for educational, research, and non-commercial use +- ✅ You can run your own instance for internal security testing +- ⚠️ **Important**: If you provide the software as a network service (SaaS), you must make your source code available under AGPL-3.0 + +### Commercial License +For organizations that want to: +- Use Phishing Club in commercial products without AGPL restrictions +- Offer Phishing Club as a service without source code disclosure +- Integrate with proprietary software +- Get dedicated support and maintenance + +**Contact us for commercial licensing**: [license@phishing.club](mailto:license@phishing.club) + +See the [LICENSE](LICENSE) file for the full AGPL-3.0 terms. + +## Getting Started + +### Production Installation + +For production use, download the latest release and follow our installation guide: + +1. **Download the latest version** from [GitHub Releases](https://github.com/phishingclub/phishingclub/releases) +2. **Follow the installation guide** at [https://phishing.club/guide/management/#install](https://phishing.club/guide/management/#install) +3. **Complete the setup** by following the step-by-step instructions in our documentation + +For detailed setup instructions, troubleshooting, and best practices, visit the [Phishing Club Guide](https://phishing.club/guide/introduction/). + +## Development Setup + +This repository contains the core Phishing Club platform. + +### Prerequisites + +- Docker and Docker Compose +- Git +- Make (optional, for convenience commands) + +### Quick Start + +1. **Clone the repository:** +```bash +git clone https://github.com/phishingclub/phishingclub.git +cd phishingclub +``` + +2. **Start the services:** +```bash +make up +# or manually: +docker compose up -d +``` + +3. **Access the platform:** +- Administration: `http://localhost:8003` +- HTTP Phishing Server: `http://localhost:80` +- HTTPS Phishing Server: `https://localhost:443` + +4. **Get admin credentials:** + +The **username** and **password** are output in the terminal when you start the services. If you restart the backend service before completing setup by logging in, the username and password will change. + +```bash +make backend-password +``` + +5. **Setup and start phishing:** + +Open `https://localhost:8003` and setup the admin account using the credentials from step 4. + +Visit the [Phishing Club Guide](https://phishing.club/guide/introduction/) for more information. 
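+
+As a quick sanity check after starting the services, you can poll the backend health endpoint. This is only a sketch: it assumes the default development port mapping from the table below (8002 for the backend API) and plain HTTP; adjust host, port and scheme to your setup.
+
+```go
+package main
+
+import (
+	"fmt"
+	"net/http"
+)
+
+func main() {
+	// /api/v1/healthz is served by the backend; 8002 is the assumed dev mapping
+	resp, err := http.Get("http://localhost:8002/api/v1/healthz")
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+	fmt.Println("backend health:", resp.Status)
+}
+```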
+
+## Services and Ports
+
+| Port | Service | Description |
+|------|---------|-------------|
+| 80 | HTTP Phishing Server | HTTP phishing server for campaigns |
+| 443 | HTTPS Phishing Server | HTTPS phishing server with SSL |
+| 8002 | Backend API | Backend API server |
+| 8003 | Frontend | Development frontend with Vite |
+| 8101 | Database Viewer | DBGate database administration |
+| 8102 | Mail Server | Mailpit SMTP server for testing |
+| 8103 | Container Logs | Dozzle log viewer |
+| 8104 | Container Stats | Docker container statistics |
+| 8201 | ACME Server | Pebble ACME server for certificates |
+| 8202 | ACME Management | Pebble management interface |
+
+## Development Commands
+
+```bash
+# Start all services
+make up
+
+# Stop all services
+make down
+
+# View logs
+make logs
+
+# Restart specific service
+make backend-restart
+make frontend-restart
+
+# Access service containers
+make backend-attach
+make frontend-attach
+
+# Reset backend database
+make backend-db-reset
+
+# Get backend admin password
+make backend-password
+```
+
+## Development Domains
+
+All domains ending with `.test` are automatically handled by the development setup. To use custom domains during development:
+
+### Option 1: DNSMasq (Recommended)
+```bash
+# Add to your DNSMasq configuration
+address=/.test/127.0.0.1
+```
+
+### Option 2: Hosts File
+Add to `/etc/hosts`:
+```
+127.0.0.1 microsoft.test
+127.0.0.1 google.test
+127.0.0.1 vikings.test
+127.0.0.1 dark-water.test
+```
+
+## Configuration
+
+### Environment Variables
+
+Copy the example environment file and customize:
+```bash
+cp backend/.env.example backend/.env.development
+```
+
+Key configuration options:
+- Database settings
+- SMTP configuration
+- Domain settings
+- Security keys
+
+### SSL Certificates
+
+The development environment uses Pebble ACME server for automatic SSL certificate generation. In production, configure your preferred ACME provider or upload custom certificates.
+
+## Contributing
+
+We welcome contributions from the community! Please follow our contribution guidelines:
+
+### Before Contributing
+
+1. **Check existing issues** - Search for existing feature requests or bug reports
+2. **Create a feature request** - If your idea doesn't exist, create a detailed feature request issue. We have criteria for which features we want to add, so please don't waste anyone's time on feature requests we would never accept.
+3. **Wait for approval** - Allow us to review and approve your proposal
+4. **Discuss implementation** - We may suggest changes or alternative approaches
+
+### Development Workflow
+
+1. **Fork the repository** and clone your fork
+2. **Create a feature branch** from `main`:
+   ```bash
+   git checkout -b feat/your-feature-name
+   ```
+3. **Follow naming conventions**:
+   - Features: `feat/feature-name`
+   - Bug fixes: `fix/bug-description`
+   - Documentation: `docs/update-description`
+   - Refactoring: `refactor/component-name`
+
+4. **Follow conventions**:
+   - Follow existing code style and patterns
+   - Update documentation as needed
+
+5. **Prepare for submission**:
+   - **Rebase your commits** to a single, clean commit before creating the pull request
+   - **Sign your commit** using the `-s` flag: `git commit -s -m "Your commit message"`
+   - Ensure your commit message is clear and descriptive
+
+6. **Submit a pull request**:
+   - Reference the related issue number
+   - Provide a clear description of changes
+   - Include screenshots/videos for UI changes
+
+### Code Standards
+
+- **Formatting**: Use project configurations
+- **Documentation**: Update relevant docs with your changes
+- **Security**: Follow secure coding practices
+
+### License Agreement
+
+**Important**: All contributors must agree to our Contributor License Agreement (CLA).
+
+By contributing to Phishing Club, you agree that your contributions will be licensed under the same dual license terms (AGPL-3.0 and commercial). You confirm that:
+
+- You have the right to contribute the code
+- Your contributions are your original work or properly attributed
+- You grant Phishing Club the right to license your contributions under both AGPL-3.0 and commercial licenses
+
+**Required**:
+- All commits must be signed off using the `-s` flag: `git commit -s -m "Your commit message"`
+- Before submitting a pull request, rebase your branch to a single commit
+- Use descriptive commit messages that explain what and why
+
+```bash
+# Example workflow:
+git rebase -i main # Interactive rebase against main branch to squash commits
+git commit --amend -s # Add sign-off to the final commit if needed
+```
+
+This adds a "Signed-off-by" line indicating you agree to our [CLA](CLA.md) and the [Developer Certificate of Origin](https://developercertificate.org/).
+
+For detailed terms, see:
+- [Contributor License Agreement (CLA.md)](CLA.md)
+- [Contributors Guide (CONTRIBUTORS.md)](CONTRIBUTORS.md)
+
+
+## Support and Security
+
+Need help? Join the [Phishing Club Discord](https://discord.gg/Zssps7U8gX).
+
+- **Security Issues**: Report privately via [security@phishing.club](mailto:security@phishing.club)
+- **Commercial Licensing**: Contact [license@phishing.club](mailto:license@phishing.club)
+- **General Support**: Join our Discord community or open a GitHub issue
+
+## Only for ethical use
+
+This platform is designed for authorized security testing only. Users are responsible for:
+
+- Obtaining proper authorization before conducting phishing simulations
+- Complying with all applicable laws and regulations
+- Using the platform ethically and responsibly
+- Protecting any data collected during testing
+
+This tool is for authorized security testing only. Misuse of this software may violate applicable laws. Users are solely responsible for ensuring their use complies with all applicable laws and regulations. diff --git a/api-test-server/Dockerfile b/api-test-server/Dockerfile new file mode 100644 index 0000000..0e763c8 --- /dev/null +++ b/api-test-server/Dockerfile @@ -0,0 +1,17 @@ +FROM golang:1.24.5
+
+WORKDIR /app
+
+# Add user
+# Add group with ID 1000 and user with ID 1000
+RUN groupadd -g 1000 appuser && \
+    useradd -r -u 1000 -g appuser appuser -d /home/appuser -m
+
+COPY go.mod /app/go.mod
+COPY main.go /app/main.go
+RUN chown -R appuser:appuser /app
+
+USER appuser
+RUN go mod tidy
+
+CMD ["go", "run", "main.go"] diff --git a/api-test-server/README.md b/api-test-server/README.md new file mode 100644 index 0000000..f280af4 --- /dev/null +++ b/api-test-server/README.md @@ -0,0 +1 @@ +This service is used for testing and exercising the API sender and webhook functionality.
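+
+For example, a minimal client for exercising the API sender endpoint could look like the sketch below. The path, the client ID `5200` and the JSON field names mirror what `main.go` expects; the base URL is an assumption and depends on how the service is exposed in your compose setup.
+
+```go
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+)
+
+func main() {
+	// field names match the Message struct in main.go
+	payload, _ := json.Marshal(map[string]string{
+		"to":      "recipient@example.test",
+		"from":    "sender@example.test",
+		"content": "hello from a test run",
+		"apiKey":  "dummy-key",
+	})
+	// main.go only accepts client ID 5200; the host below is an assumption
+	resp, err := http.Post(
+		"http://localhost/api-sender/5200",
+		"application/json",
+		bytes.NewReader(payload),
+	)
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+	fmt.Println("status:", resp.Status)
+}
+```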
diff --git a/api-test-server/go.mod b/api-test-server/go.mod new file mode 100644 index 0000000..5ea4dd1 --- /dev/null +++ b/api-test-server/go.mod @@ -0,0 +1,3 @@ +module github.com/phishingclub/phishingclub/api-test-server + +go 1.23.8 diff --git a/api-test-server/main.go b/api-test-server/main.go new file mode 100644 index 0000000..45c03a9 --- /dev/null +++ b/api-test-server/main.go @@ -0,0 +1,164 @@ +package main + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "math/rand" + "net/http" + "strings" + "time" +) + +type Message struct { + To string `json:"to"` + From string `json:"from"` + Content string `json:"content"` + APIKey string `json:"apiKey"` +} + +func (m *Message) isValid() error { + if m.To == "" { + return errors.New("missing 'to' field") + } + if m.From == "" { + return errors.New("missing 'from' field") + } + if m.Content == "" { + return errors.New("missing 'content' field") + } + if m.APIKey == "" { + return errors.New("missing 'apiKey' field") + } + return nil +} + +func main() { + mux := http.NewServeMux() + mux.HandleFunc("POST /api-sender/{clientID}", handleAPISender) + mux.HandleFunc("POST /webhook", handleTestWebhook) // todo rename method and usage to test prefoxhl + err := http.ListenAndServe(":80", mux) + if err != nil { + panic(err) + } +} + +func handleAPISender(w http.ResponseWriter, req *http.Request) { + body1, body2, err := cloneBody(req) + if err != nil { + log.Println("failed to clone request body:", err) + http.Error(w, "failed to clone request body", http.StatusInternalServerError) + return + } + log.Println("received api send request") + log.Println(prettyRequest(req, body1)) + + clientID := req.PathValue("clientID") + if clientID != "5200" { + log.Println("invalid client ID") + http.Error(w, "invalid client ID", http.StatusForbidden) + return + } + // parse message + msg := &Message{} + dec := json.NewDecoder(body2) + if err := dec.Decode(&msg); err != nil { + log.Println("failed to decode message:", err) + http.Error(w, "invalid message", http.StatusBadRequest) + return + } + if err := msg.isValid(); err != nil { + log.Println("invalid message:", err) + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + sleepTime := time.Duration(rand.Intn(2)+1) * time.Second + log.Printf("sleeping for %f seconds\n", sleepTime.Seconds()) + time.Sleep(sleepTime) + + // return success + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{"data": "message sent"}) + log.Println("message sent successfully") +} + +func handleTestWebhook(w http.ResponseWriter, req *http.Request) { + log.Println("received webhook") + body1, body2, err := cloneBody(req) + if err != nil { + log.Println("failed to clone request body:", err) + http.Error(w, "failed to clone request body", http.StatusInternalServerError) + return + } + log.Println(prettyRequest(req, body1)) + // sleep random time between 1 and 3 seconds + time.Sleep(time.Duration(rand.Intn(2)+1) * time.Second) + bodyBytes, err := io.ReadAll(body2) + if err != nil { + log.Println("failed to read body for HMAC calculation:", err) + http.Error(w, "failed to read body", http.StatusInternalServerError) + return + } + // Calculate HMAC256 + // from seed/webhooks.go + h := hmac.New(sha256.New, []byte("WEBHOOK_TEST_KEY@1234")) + h.Write(bodyBytes) + calculatedHMAC := hex.EncodeToString(h.Sum(nil)) + + // Get the signature from the header + signature := req.Header.Get("x-signature") 
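+	// The sender is expected to put a hex-encoded HMAC-SHA256 of the raw
+	// request body in the x-signature header, computed with the shared test
+	// key above ("WEBHOOK_TEST_KEY@1234" from seed/webhooks.go), roughly:
+	//   mac := hmac.New(sha256.New, key); mac.Write(body); hex.EncodeToString(mac.Sum(nil))
+	// The literal value "UNSIGNED" skips signature verification below.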
+ if signature == "" { + log.Println("missing x-signature header") + http.Error(w, "missing x-signature header", http.StatusBadRequest) + return + } + if signature != "UNSIGNED" { + // Compare the calculated HMAC with the signature + if calculatedHMAC != signature { + log.Println("invalid HMAC signature") + http.Error(w, "invalid HMAC signature", http.StatusForbidden) + return + } + log.Println("valid HMAC signature") + } else { + log.Println("skipping HMAC signature") + } + + // return success + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{"data": "webhook processed"}) + //log.Printf("respone: %++v\n", w) + log.Println("test webhook processed successfully") +} + +func prettyRequest(req *http.Request, body io.ReadCloser) string { + l := fmt.Sprintf("Request:\n\tMethod: %s\n\tURL: %s\n\tHeaders:\n", req.Method, req.URL) + for k, v := range req.Header { + // which headers have multiple values? + value := strings.Join(v, "") + l += fmt.Sprintf("\t\t%s: %s\n", k, value) + } + b, err := io.ReadAll(body) + if err != nil { + return l + fmt.Sprintf("\tBody: failed to read body: %v\n", err) + } + l += fmt.Sprintf("\tBody: %s\n", string(b)) + return l +} + +func cloneBody(req *http.Request) (io.ReadCloser, io.ReadCloser, error) { + bodyBytes, err := io.ReadAll(req.Body) + if err != nil { + return nil, nil, err + } + body1 := io.NopCloser(strings.NewReader(string(bodyBytes))) + body2 := io.NopCloser(strings.NewReader(string(bodyBytes))) + return body1, body2, nil +} diff --git a/backend/.air.docker.toml b/backend/.air.docker.toml new file mode 100755 index 0000000..e648944 --- /dev/null +++ b/backend/.air.docker.toml @@ -0,0 +1,49 @@ +root = "." +testdata_dir = "testdata" +tmp_dir = ".dev" + +[build] +args_bin = ["-files ./.dev", "-config ./config.docker.json"] +bin = "/app/.dev-air/platform" +cmd = "CGO_ENABLED=1 go build dev -o ./.dev-air/platform main.go" # community +delay = 1000 +exclude_dir = [ + "out", + ".dev", + "vendor", + "testdata", + ".git", + "build", + "frontend/build/", +] +exclude_file = [] +exclude_regex = ["_test.go"] +exclude_unchanged = false +follow_symlink = false +#full_bin = "dlv exec --log-dest /.dev/dlv.log --accept-multiclient --headless --continue --listen 0.0.0.0:2345 --api-version 2 /app/air/platform --" +# to debug something early in the application like at boot up, then remove the continue flag +# this will make the debugger not start the program before a client attaches to it +#full_bin = "dlv exec --accept-multiclient --headless --continue --listen 0.0.0.0:2345 --api-version 2 /app/air/platform --" +include_dir = [""] +include_ext = ["go", "tpl", "tmpl", "html"] +kill_delay = "0s" +log = "build-errors.log" +send_interrupt = true #false +stop_on_error = true + +[color] +# app = "red" +# build = "yellow" +# main = "magenta" +# runner = "green" +# watcher = "cyan" + +[log] +time = false +main_only = false + +[misc] +clean_on_exit = false + +[screen] +clear_on_rebuild = false diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 0000000..eb01be8 --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,2 @@ +**/node_modules/ +./.dev/** diff --git a/backend/.env.development b/backend/.env.development new file mode 100644 index 0000000..e69de29 diff --git a/backend/.gitignore b/backend/.gitignore new file mode 100644 index 0000000..8231c3e --- /dev/null +++ b/backend/.gitignore @@ -0,0 +1,10 @@ +.dev-air/* +.dev/**/* +db.sqlite3 +*.sqlite3 +# air +air +vendor 
+frontend/build/*
+go.work
+go.work.sum diff --git a/backend/.ignore b/backend/.ignore new file mode 100644 index 0000000..22d0d82 --- /dev/null +++ b/backend/.ignore @@ -0,0 +1 @@ +vendor diff --git a/backend/.zed/settings.json b/backend/.zed/settings.json new file mode 100644 index 0000000..152c087 --- /dev/null +++ b/backend/.zed/settings.json @@ -0,0 +1,9 @@ +{
+  "lsp": {
+    "gopls": {
+      "initialization_options": {
+        "buildFlags": ["-tags=dev"]
+      }
+    }
+  }
+} diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..c9ae40c --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,27 @@ +# development docker file
+FROM golang:1.24.5
+
+EXPOSE 8000 8001
+
+WORKDIR /app
+
+# Add user
+# Add group with ID 1000 and user with ID 1000
+RUN groupadd -g 1000 appuser && \
+    useradd -r -u 1000 -g appuser appuser -d /home/appuser -m
+
+# install deps
+#RUN go install github.com/cosmtrek/air@latest \
+#RUN go install github.com/go-delve/delve/cmd/dlv@1.9.1
+COPY go.mod /app/go.mod
+COPY go.sum /app/go.sum
+RUN mkdir -p /app/.dev
+RUN mkdir -p /app/.test
+RUN chown -R appuser:appuser /app
+
+
+USER appuser
+RUN go install github.com/cosmtrek/air@v1.40.4 && go install github.com/go-delve/delve/cmd/dlv@latest
+RUN go mod tidy
+
+CMD ["air", "-c", "/.dev-air/.air.docker.toml"] diff --git a/backend/README.md b/backend/README.md new file mode 100644 index 0000000..8216461 --- /dev/null +++ b/backend/README.md @@ -0,0 +1,53 @@ +### Platform backend
+Install AIR for auto-reloading: `go install github.com/cosmtrek/air@latest`
+
+To start the project locally run: `make backend-dev`
+
+Check the terminal output to see the address, username and password.
+
+### Production / Deployment
+The program must be executable:
+`chmod +x ./path/to/binary`
+The program must have rights to serve on privileged ports:
+`sudo setcap CAP_NET_BIND_SERVICE=+eip /path/to/binary`
+
+### Known Issues
+
+#### Hot reloading not working / New files not working
+If a file is not accessible in the frontend after adding it, save an existing file to trigger a rebuild. This should now include the new file. If this does not work, try to run `make sorry`, which will restart all services.
+
+### Debugging with AIR via docker and delve
+To debug the backend you must uncomment the full bin line in the docker air toml file.
+Attach to the debugger to trigger starting the backend.
+Do not edit files while in debug mode; instead stop the debugger, edit the file and start the debugger again.
+
+### docker-compose
+Docker Compose is a plugin for Docker that allows you to define multiple services in a single file.
+Previously this was a standalone Python script run as `docker-compose`, but as a plugin it is invoked as `docker compose`.
+
+In the Makefile, you can edit the top line to change whether docker compose is called with or without the dash (-) in the middle.
+
+# Notes about allow listing
+
+{
+    admin_allowed
+    trusted_proxies
+    trusted_ip_header
+}
+
+If no admin_allowed is set, all IPs are welcome.
+
+If no trusted proxies are set, headers such as X-Forwarded-By will not be used.
+
+If TrustedIPHeader is set, then this header is used for finding the real IP.
+For example cloudflare uses cf-connecting-ip.
+
+If TrustedIPHeader is not set and trusted_proxies is set, then it trusts the IP
+from X-Forwarded
+
+# SSO Setup
+## Microsoft Entra-ID
+
+### Ensure only specific tenant users can log in.
+In 'properties' set 'Assignment required' to 'Yes'.
+In 'Users and groups' add the users or groups that should be able to log into the application.
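+
+A rough illustration of the resolution order described above (this is a sketch, not the actual implementation; the helper names are made up and it assumes the standard X-Forwarded-For header for the trusted-proxy case):
+
+```go
+package main
+
+import (
+	"fmt"
+	"net"
+	"net/http"
+	"strings"
+)
+
+// resolveClientIP sketches the order described above: a trusted IP header
+// wins, then X-Forwarded-For when the peer is a trusted proxy, and finally
+// the plain remote address.
+func resolveClientIP(r *http.Request, trustedIPHeader string, trustedProxies []string) string {
+	remote, _, _ := net.SplitHostPort(r.RemoteAddr)
+	if trustedIPHeader != "" {
+		if v := r.Header.Get(trustedIPHeader); v != "" {
+			return v
+		}
+	}
+	for _, proxy := range trustedProxies {
+		if proxy == remote {
+			if xff := r.Header.Get("X-Forwarded-For"); xff != "" {
+				// the first entry is the original client
+				return strings.TrimSpace(strings.Split(xff, ",")[0])
+			}
+		}
+	}
+	return remote
+}
+
+// isAdminAllowed: with no admin_allowed entries every IP is accepted,
+// otherwise the resolved IP must be on the list.
+func isAdminAllowed(ip string, adminAllowed []string) bool {
+	if len(adminAllowed) == 0 {
+		return true
+	}
+	for _, allowed := range adminAllowed {
+		if allowed == ip {
+			return true
+		}
+	}
+	return false
+}
+
+func main() {
+	req, _ := http.NewRequest("GET", "http://admin.example.test/", nil)
+	req.RemoteAddr = "203.0.113.7:51234"
+	req.Header.Set("cf-connecting-ip", "198.51.100.10")
+
+	ip := resolveClientIP(req, "cf-connecting-ip", nil)
+	fmt.Println(ip, isAdminAllowed(ip, []string{"198.51.100.10"}))
+}
+```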
diff --git a/backend/acme/certmagic.go b/backend/acme/certmagic.go new file mode 100644 index 0000000..6685987 --- /dev/null +++ b/backend/acme/certmagic.go @@ -0,0 +1,89 @@ +package acme + +import ( + _ "embed" + + "github.com/caddyserver/certmagic" + "github.com/go-errors/errors" + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/database" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "gorm.io/gorm" +) + +// maintenanceCore wraps the original core to filter maintenance messages +type maintenanceCore struct { + zapcore.Core + originalCore zapcore.Core +} + +func (c *maintenanceCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry { + if ent.Message == "started background certificate maintenance" { + c.Core = c.originalCore + return nil + } + return c.Core.Check(ent, ce) +} + +func (c *maintenanceCore) With(fields []zapcore.Field) zapcore.Core { + return &maintenanceCore{ + Core: c.Core.With(fields), + originalCore: c.originalCore, + } +} + +func setupCertMagic( + certStoragePath string, + conf *config.Config, + db *gorm.DB, + logger *zap.SugaredLogger, +) (*certmagic.Config, *certmagic.Cache, error) { + l := logger.Desugar() + usedLogger := l.Core() + if l.Level() != zap.DebugLevel { + usedLogger = &maintenanceCore{ + Core: l.Core(), + originalCore: usedLogger, + } + } + filteredLogger := zap.New(usedLogger) + + // Create main config first + certmagic.DefaultACME.Logger = l + certmagic.DefaultACME.Email = conf.ACMEEmail() + mainConfig := certmagic.NewDefault() + mainConfig.Logger = l + mainConfig.Storage = &certmagic.FileStorage{Path: certStoragePath} + mainConfig.OnDemand = &certmagic.OnDemandConfig{ + DecisionFunc: func(name string) error { + // check if admin server with auto TLS + if conf.TLSAuto() && conf.TLSHost() == name { + return nil + } + // check phishing host with managed TLS + res := db. + Select("id"). + Where("name = ?", name). + Where("managed_tls_certs IS true"). 
+ First(&database.Domain{}) + + if res.RowsAffected > 0 { + return nil + } + return errors.Errorf("not allowing TLS on-demand request for '%s'", name) + }, + } + // create cache with config getter + var finalConfig *certmagic.Config + defaultCache := certmagic.NewCache(certmagic.CacheOptions{ + GetConfigForCert: func(cert certmagic.Certificate) (*certmagic.Config, error) { + return finalConfig, nil + }, + Logger: filteredLogger, + }) + // create final config that uses the cache + finalConfig = certmagic.New(defaultCache, *mainConfig) + + return finalConfig, defaultCache, nil +} diff --git a/backend/acme/certmagic_dev.go b/backend/acme/certmagic_dev.go new file mode 100644 index 0000000..ce18abc --- /dev/null +++ b/backend/acme/certmagic_dev.go @@ -0,0 +1,56 @@ +//go:build dev + +package acme + +import ( + "crypto/x509" + _ "embed" + "encoding/pem" + "log" + + "github.com/caddyserver/certmagic" + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/errs" + "go.uber.org/zap" + "gorm.io/gorm" +) + +const DEV_ACME_URL = "https://pebble:14000/dir" + +//go:embed pebble.minica.pem +var acmeRootCertPemBlock []byte + +func loadDevelopmentPebbleCertificate() (*x509.Certificate, error) { + certDERBlock, _ := pem.Decode(acmeRootCertPemBlock) + if certDERBlock == nil { + log.Fatal("Failed to parse the certificate PEM.") + } + acmeRootCert, err := x509.ParseCertificate(certDERBlock.Bytes) + if err != nil { + log.Fatal(err) + } + return acmeRootCert, nil +} + +// SetupCertMagic creates a certmagic config for development +// and checks which domains are allowed from the db before getting a certificate +func SetupCertMagic( + certStoragePath string, + conf *config.Config, + db *gorm.DB, + logger *zap.SugaredLogger, +) (*certmagic.Config, *certmagic.Cache, error) { + cert, err := loadDevelopmentPebbleCertificate() + if err != nil { + return nil, nil, errs.Wrap(err) + } + pool := x509.NewCertPool() + pool.AddCert(cert) + certmagic.DefaultACME = certmagic.ACMEIssuer{ + CA: DEV_ACME_URL, + TestCA: DEV_ACME_URL, + Agreed: true, + TrustedRoots: pool, + } + return setupCertMagic(certStoragePath, conf, db, logger) +} diff --git a/backend/acme/certmagic_prod.go b/backend/acme/certmagic_prod.go new file mode 100644 index 0000000..e3a5b96 --- /dev/null +++ b/backend/acme/certmagic_prod.go @@ -0,0 +1,23 @@ +//go:build !dev + +package acme + +import ( + _ "embed" + + "github.com/caddyserver/certmagic" + "github.com/phishingclub/phishingclub/config" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// SetupCertMagic creates a certmagic config for development +// and checks which domains are allowed from the db before getting a certificate +func SetupCertMagic( + certStoragePath string, + conf *config.Config, + db *gorm.DB, + logger *zap.SugaredLogger, +) (*certmagic.Config, *certmagic.Cache, error) { + return setupCertMagic(certStoragePath, conf, db, logger) +} diff --git a/backend/acme/pebble-config.json b/backend/acme/pebble-config.json new file mode 100644 index 0000000..d6ef0d5 --- /dev/null +++ b/backend/acme/pebble-config.json @@ -0,0 +1,20 @@ +{ + "pebble": { + "listenAddress": "0.0.0.0:14000", + "managementListenAddress": "0.0.0.0:15000", + "certificate": "test/certs/localhost/cert.pem", + "privateKey": "test/certs/localhost/key.pem", + "httpPort": 8000, + "tlsPort": 8001, + "ocspResponderURL": "", + "externalAccountBindingRequired": false, + "domainBlocklist": [ + "blocked-domain.example" + ], + "retryAfter": { + "authz": 3, + "order": 5 + }, + "certificateValidityPeriod": 157766400 + 
} +} \ No newline at end of file diff --git a/backend/acme/pebble.minica.pem b/backend/acme/pebble.minica.pem new file mode 100644 index 0000000..35388ee --- /dev/null +++ b/backend/acme/pebble.minica.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDCTCCAfGgAwIBAgIIJOLbes8sTr4wDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE +AxMVbWluaWNhIHJvb3QgY2EgMjRlMmRiMCAXDTE3MTIwNjE5NDIxMFoYDzIxMTcx +MjA2MTk0MjEwWjAgMR4wHAYDVQQDExVtaW5pY2Egcm9vdCBjYSAyNGUyZGIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5WgZNoVJandj43kkLyU50vzCZ +alozvdRo3OFiKoDtmqKPNWRNO2hC9AUNxTDJco51Yc42u/WV3fPbbhSznTiOOVtn +Ajm6iq4I5nZYltGGZetGDOQWr78y2gWY+SG078MuOO2hyDIiKtVc3xiXYA+8Hluu +9F8KbqSS1h55yxZ9b87eKR+B0zu2ahzBCIHKmKWgc6N13l7aDxxY3D6uq8gtJRU0 +toumyLbdzGcupVvjbjDP11nl07RESDWBLG1/g3ktJvqIa4BWgU2HMh4rND6y8OD3 +Hy3H8MY6CElL+MOCbFJjWqhtOxeFyZZV9q3kYnk9CAuQJKMEGuN4GU6tzhW1AgMB +AAGjRTBDMA4GA1UdDwEB/wQEAwIChDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB +BQUHAwIwEgYDVR0TAQH/BAgwBgEB/wIBADANBgkqhkiG9w0BAQsFAAOCAQEAF85v +d40HK1ouDAtWeO1PbnWfGEmC5Xa478s9ddOd9Clvp2McYzNlAFfM7kdcj6xeiNhF +WPIfaGAi/QdURSL/6C1KsVDqlFBlTs9zYfh2g0UXGvJtj1maeih7zxFLvet+fqll +xseM4P9EVJaQxwuK/F78YBt0tCNfivC6JNZMgxKF59h0FBpH70ytUSHXdz7FKwix +Mfn3qEb9BXSk0Q3prNV5sOV3vgjEtB4THfDxSz9z3+DepVnW3vbbqwEbkXdk3j82 +2muVldgOUgTwK8eT+XdofVdntzU/kzygSAtAQwLJfn51fS1GvEcYGBc1bDryIqmF +p9BI7gVKtWSZYegicA== +-----END CERTIFICATE----- \ No newline at end of file diff --git a/backend/acme/selfsigned.go b/backend/acme/selfsigned.go new file mode 100644 index 0000000..8bd2903 --- /dev/null +++ b/backend/acme/selfsigned.go @@ -0,0 +1,187 @@ +package acme + +import ( + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "math/big" + "net" + "os" + "path/filepath" + "time" + + "github.com/go-errors/errors" + "github.com/phishingclub/phishingclub/build" + "go.uber.org/zap" +) + +// Information is a struct for certificate information +type Information struct { + CommonName string + Organization []string + Country []string + Province []string + Locality []string + StreetAddress []string + PostalCode []string +} + +// NewInformation creates a new Information +func NewInformation( + commonName string, + organization []string, + country []string, + province []string, + locality []string, + streetAddress []string, + postalCode []string, +) Information { + return Information{ + Organization: organization, + Country: country, + Province: province, + Locality: locality, + StreetAddress: streetAddress, + PostalCode: postalCode, + } +} + +// NewInformationWithDefault creates a new Information with default values +func NewInformationWithDefault() Information { + return NewInformation( + "", + []string{""}, + []string{""}, + []string{""}, + []string{""}, + []string{""}, + []string{""}, + ) +} + +// CreateSelfSignedCert creates a self signed certificate with provided hostnames +func CreateSelfSignedCert( + logger *zap.SugaredLogger, + info Information, + hostnames []string, + publicPath string, + privatePath string, +) error { + // Process hostnames into IP addresses and DNS names + var ipAddresses []net.IP + var dnsNames []string + + if !build.Flags.Production { + ipAddresses = append(ipAddresses, net.IPv4(127, 0, 0, 1), net.IPv6loopback) + dnsNames = append(dnsNames, "localhost") + } + + for _, h := range hostnames { + if ip := net.ParseIP(h); ip != nil { + ipAddresses = append(ipAddresses, ip) + } else { + dnsNames = append(dnsNames, h) + } + } + + // Use info.CommonName if provided, otherwise use first hostname or "localhost" + commonName := info.CommonName + if commonName 
== "" || commonName == "127.0.0.1" { + if len(hostnames) > 0 { + commonName = hostnames[0] + } else { + commonName = "localhost" + } + } + + // Create certificate with appropriate SAN extensions + serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) + serialNumber, err := rand.Int(rand.Reader, serialNumberLimit) + if err != nil { + return errors.Errorf("failed to generate serial number: %s", err) + } + + cert := &x509.Certificate{ + SerialNumber: serialNumber, + Subject: pkix.Name{ + CommonName: commonName, + Organization: info.Organization, + Country: info.Country, + Province: info.Province, + Locality: info.Locality, + StreetAddress: info.StreetAddress, + PostalCode: info.PostalCode, + }, + IPAddresses: ipAddresses, + DNSNames: dnsNames, + NotBefore: time.Now(), + NotAfter: time.Now().AddDate(10, 0, 0), + SubjectKeyId: []byte{0, 0, 0, 0, 0}, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth, x509.ExtKeyUsageServerAuth}, + KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment, + BasicConstraintsValid: true, + } + + certPrivKey, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + return errors.Errorf("failed to generate private key: %s", err) + } + + certBytes, err := x509.CreateCertificate(rand.Reader, cert, cert, &certPrivKey.PublicKey, certPrivKey) + if err != nil { + return errors.Errorf("failed to create certificate: %s", err) + } + + // Create directories if they don't exist + certDir := filepath.Dir(publicPath) + if err := os.MkdirAll(certDir, 0750); err != nil { + return errors.Errorf("failed to create certificate directory: %s", err) + } + + keyDir := filepath.Dir(privatePath) + if err := os.MkdirAll(keyDir, 0750); err != nil { + return errors.Errorf("failed to create key directory: %s", err) + } + + // Write certificate + // #nosec + certOut, err := os.Create(publicPath) + if err != nil { + return errors.Errorf("failed to open certificate file for writing: %s", err) + } + defer certOut.Close() + + if err := pem.Encode(certOut, &pem.Block{Type: "CERTIFICATE", Bytes: certBytes}); err != nil { + return errors.Errorf("failed to write certificate: %s", err) + } + + // Write private key + // #nosec + keyOut, err := os.OpenFile(privatePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600) + if err != nil { + return errors.Errorf("failed to open key file for writing: %s", err) + } + defer keyOut.Close() + + privBlock := &pem.Block{ + Type: "RSA PRIVATE KEY", + Bytes: x509.MarshalPKCS1PrivateKey(certPrivKey), + } + + if err := pem.Encode(keyOut, privBlock); err != nil { + return errors.Errorf("failed to write private key: %s", err) + } + /* + logger.Debugf("generated self-signed certificate", + "certificate", publicPath, + "key", privatePath, + "common_name", commonName, + "ip_addresses", ipAddresses, + "dns_names", dnsNames, + ) + */ + + return nil +} diff --git a/backend/api/response.go b/backend/api/response.go new file mode 100644 index 0000000..b1b357d --- /dev/null +++ b/backend/api/response.go @@ -0,0 +1,173 @@ +package api + +import ( + "fmt" + "net/http" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" +) + +const ( + // All constant here are used for frontend responses + NotFound = "Not found" + InvalidData = "Missing or invalid data" + Unauthorized = "Authorization failed" + Forbidden = "Access denied" + ServerError = "Internal server error" + InvalidCompanyID = "Invalid company ID" + InvalidDomainID = "Invalid domain ID" + InvalidMessageID = "Invalid message ID" + InvalidPageID = "Invalid page ID" + InvalidPageTypeID = 
"Invalid page type ID" + InvalidRecipientID = "Invalid recipient ID" + InvalidRecipientGroupID = "Invalid recipient group ID" + InvalidSMTPConfigurationID = "Invalid SMTP configuration ID" + CompanyNotFound = "Company not found" +) + +// JSONResponse is the response structure for the API +type JSONResponse struct { + Success bool `json:"success"` + Data any `json:"data"` + Error string `json:"error"` +} + +// JSONResponseHandler is a interface for API responses +type JSONResponseHandler interface { + OK(g *gin.Context, data any) + NotFound(g *gin.Context) + Unauthorized(g *gin.Context) + Forbidden(g *gin.Context) + BadRequest(g *gin.Context) + BadRequestMessage(g *gin.Context, message string) + ValidationFailed(g *gin.Context, field string, err error) + ServerError(g *gin.Context) + ServerErrorMessage(g *gin.Context, message string) +} + +// jsonResponseHandler is a JSON API responder +type jsonResponseHandler struct{} + +// NewJSONResponseHandler creates a new JSON responder +func NewJSONResponseHandler() JSONResponseHandler { + return &jsonResponseHandler{} +} + +// newResponse creates a new JSON response +func (r *jsonResponseHandler) newResponse( + success bool, + data any, + errorMessage string, +) JSONResponse { + return JSONResponse{ + Success: success, + Data: data, + Error: errorMessage, + } +} + +// newOK creates a new OK response +func (r *jsonResponseHandler) newOK(data any) JSONResponse { + return r.newResponse(true, data, "") +} + +// newError creates a new error response +func (r *jsonResponseHandler) newError(errorMessage string) JSONResponse { + return r.newResponse(false, nil, errorMessage) +} + +// OK responds with 200 - OK +func (r *jsonResponseHandler) OK(g *gin.Context, data any) { + g.JSON(http.StatusOK, r.newOK(data)) +} + +// NotFound responds 404 - NOT FOUND +func (r *jsonResponseHandler) NotFound(g *gin.Context) { + g.JSON( + http.StatusNotFound, + r.newError(NotFound), + ) + g.Abort() +} + +// Unauthorized responds with 401 - UNAUTHORIZED +// generic error handler for authentication errors +func (r *jsonResponseHandler) Unauthorized(g *gin.Context) { + g.JSON( + http.StatusUnauthorized, + r.newError(Forbidden), + ) + g.Abort() +} + +// Forbidden responds with 403 - FORBIDDEN and a custom error message +// generic error handler for authorization errors +func (r *jsonResponseHandler) Forbidden(g *gin.Context) { + g.JSON( + http.StatusForbidden, + r.newError(Unauthorized), + ) + g.Abort() +} + +// BadRequest responds with 400 - BAD REQUEST +func (r *jsonResponseHandler) BadRequest(g *gin.Context) { + g.JSON( + http.StatusBadRequest, + r.newError(InvalidData), + ) + g.Abort() +} + +// BadRequestMessage responds with 400 - BAD REQUEST and a custom error message +func (r *jsonResponseHandler) BadRequestMessage(g *gin.Context, message string) { + g.JSON( + http.StatusBadRequest, + r.newError(message), + ) + g.Abort() +} + +func (r *jsonResponseHandler) unwrapErrorMessage(err error) string { + message := err.Error() + unwrapped := errors.Unwrap(err) + if unwrapped != nil { + message = r.unwrapErrorMessage(unwrapped) + } + return message +} + +// ValidationFailed responds with 400 - BAD REQUEST and a validation error message +// that includes the field name and the validation error message +// if the err IS a ValidationError it will unwrap the validation error +// else it will use the error passed +func (r *jsonResponseHandler) ValidationFailed(g *gin.Context, field string, err error) { + message := r.unwrapErrorMessage(err) + g.JSON( + http.StatusBadRequest, + 
r.newError( + fmt.Sprintf("%s %s", field, message), + ), + ) + g.Abort() +} + +// ServerError responds with 500 - INTERNAL SERVER ERROR +func (r *jsonResponseHandler) ServerError(g *gin.Context) { + g.JSON( + http.StatusInternalServerError, + r.newError(ServerError), + ) + g.Abort() +} + +// ServerError responds with 500 - INTERNAL SERVER ERROR and a custom error message +func (r *jsonResponseHandler) ServerErrorMessage(g *gin.Context, message string) { + g.JSON( + http.StatusInternalServerError, + r.newError(message), + ) + g.Abort() +} diff --git a/backend/app/administration.go b/backend/app/administration.go new file mode 100644 index 0000000..81c039a --- /dev/null +++ b/backend/app/administration.go @@ -0,0 +1,817 @@ +package app + +import ( + "context" + "crypto/tls" + "crypto/x509" + "embed" + "encoding/pem" + "fmt" + "io/fs" + "log" + "net" + "net/http" + "os" + "strings" + "time" + + "github.com/caddyserver/certmagic" + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/acme" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/frontend" + "github.com/phishingclub/phishingclub/server" + "go.uber.org/zap" +) + +const ( + // health + ROUTE_V1_HEALTH = "/api/v1/healthz" + ROUTE_V1_LIVE = "/api/v1/livez" + ROUTE_V1_READY = "/api/v1/readyz" + // application + ROUTE_V1_FEATURE = "/api/v1/features" + ROUTE_V1_UPDATE_AVAILABLE = "/api/v1/update/available" + ROUTE_V1_UPDATE_AVAILABLE_CACHED = "/api/v1/update/available/cached" + ROUTE_V1_UPDATE = "/api/v1/update" + // user + ROUTE_V1_USER = "/api/v1/user" + ROUTE_V1_USER_ID = "/api/v1/user/:id" + ROUTE_V1_USER_LOGIN = "/api/v1/user/login" + ROUTE_V1_USER_LOGOUT = "/api/v1/user/logout" + // #nosec + ROUTE_V1_USER_PASSWORD = "/api/v1/user/password" + ROUTE_V1_USER_USERNAME = "/api/v1/user/username" + ROUTE_V1_USER_FULLNAME = "/api/v1/user/fullname" + ROUTE_V1_USER_EMAIL = "/api/v1/user/email" + ROUTE_V1_USER_SESSIONS = "/api/v1/user/sessions" + ROUTE_V1_USER_SESSIONS_INVALIDATE = "/api/v1/user/sessions/invalidate" + ROUTE_V1_USER_API = "/api/v1/user/api" + // sso + ROUTE_V1_SSO_ENTRA_ID = "/api/v1/sso/entra-id" + ROUTE_V1_SSO_ENTRA_ID_ENABLED = "/api/v1/sso/entra-id/enabled" + ROUTE_V1_SSO_ENTRA_ID_LOGIN = "/api/v1/sso/entra-id/login" + ROUTE_V1_SSO_ENTRA_ID_CALLBACK = "/api/v1/sso/entra-id/auth" + // mfa + ROUTE_V1_USER_MFA_TOTP_SETUP = "/api/v1/user/mfa/totp/setup" + ROUTE_V1_USER_MFA_TOTP_SETUP_VERIFY = "/api/v1/user/mfa/totp/setup/verify" + ROUTE_V1_USER_MFA_TOTP_VERIFY = "/api/v1/user/mfa/totp/verify" + ROUTE_V1_USER_MFA_TOTP = "/api/v1/user/mfa/totp" + ROUTE_V1_QR_FROM_TOTP = "/api/v1/qr/totp" + ROUTE_V1_QR_URL_TO_HTML = "/api/v1/qr/html" + // session + ROUTE_V1_SESSION_ID = "/api/v1/session/:id" + ROUTE_V1_SESSION_PING = "/api/v1/session/ping" + // company + ROUTE_V1_COMPANY = "/api/v1/company" + ROUTE_V1_COMPANY_ID = "/api/v1/company/:id" + ROUTE_V1_COMPANY_ID_EXPORT = "/api/v1/company/:id/export" + ROUTE_V1_COMPANY_ID_EXPORT_SHARED = "/api/v1/company/shared/export" + // option + ROUTE_V1_OPTION = "/api/v1/option" + ROUTE_V1_OPTION_GET = "/api/v1/option/:key" + // installation + ROUTE_V1_INSTALL = "/api/v1/install" + // domain + ROUTE_V1_DOMAIN = "/api/v1/domain" + ROUTE_V1_DOMAIN_SUBSET = "/api/v1/domain/subset" + ROUTE_V1_DOMAIN_ID = "/api/v1/domain/:id" + ROUTE_V1_DOMAIN_NAME = "/api/v1/domain/name/:domain" + // page + ROUTE_V1_PAGE = "/api/v1/page" + ROUTE_V1_PAGE_OVERVIEW = 
"/api/v1/page/overview" + ROUTE_V1_PAGE_ID = "/api/v1/page/:id" + ROUTE_V1_PAGE_CONTENT_ID = "/api/v1/page/:id/content" + // recipient and groups + ROUTE_V1_RECIPIENT = "/api/v1/recipient" + ROUTE_V1_RECIPIENT_IMPORT = "/api/v1/recipient/import" + ROUTE_V1_RECIPIENT_EXPORT = "/api/v1/recipient/:id/export" + ROUTE_V1_RECIPIENT_ID = "/api/v1/recipient/:id" + ROUTE_V1_RECIPIENT_ID_EVENTS = "/api/v1/recipient/:id/events" + ROUTE_V1_RECIPIENT_ID_STATS = "/api/v1/recipient/:id/stats" + ROUTE_V1_RECIPIENT_REPEAT_OFFENDERS = "/api/v1/recipient/repeat-offenders" + ROUTE_V1_RECIPIENT_GROUP = "/api/v1/recipient/group" + ROUTE_V1_RECIPIENT_GROUP_ID = "/api/v1/recipient/group/:id" + ROUTE_V1_RECIPIENT_GROUP_ID_IMPORT = "/api/v1/recipient/group/:id/import" + ROUTE_V1_RECIPIENT_GROUP_RECIPIENTS = "/api/v1/recipient/group/:id/recipients" + // logging + ROUTE_V1_LOG = "/api/v1/log" + ROUTE_V1_LOG_TEST = "/api/v1/log/test" + // smtp configuration + ROUTE_V1_SMTP_CONFIGURATION = "/api/v1/smtp-configuration" + ROUTE_V1_SMTP_CONFIGURATION_ID = "/api/v1/smtp-configuration/:id" + ROUTE_V1_SMTP_CONFIGURATION_ID_TEST_EMAIL = "/api/v1/smtp-configuration/:id/test-email" + ROUTE_V1_SMTP_CONFIGURATION_HEADERS = "/api/v1/smtp-configuration/:id/header" + ROUTE_V1_SMTP_HEADER_ID = "/api/v1/smtp-configuration/:id/header/:headerID" + // email + ROUTE_V1_EMAIL = "/api/v1/email" + ROUTE_V1_EMAIL_OVERVIEW = "/api/v1/email/overview" + ROUTE_V1_EMAIL_ID = "/api/v1/email/:id" + ROUTE_V1_EMAIL_SEND_TEST = "/api/v1/email/:id/send-test" + ROUTE_V1_EMAIL_CONTENT_ID = "/api/v1/email/:id/content" + // campaign + ROUTE_V1_CAMPAIGN_TEMPLATE = "/api/v1/campaign/template" + ROUTE_v1_CAMPAIGN_TEMPLATE_ID = "/api/v1/campaign/template/:id" + ROUTE_V1_CAMPAIGN = "/api/v1/campaign" + ROUTE_V1_CAMPAIGN_CALENDAR = "/api/v1/campaign/calendar" + ROUTE_V1_CAMPAIGN_ACTIVE = "/api/v1/campaign/active" + ROUTE_V1_CAMPAIGN_UPCOMING = "/api/v1/campaign/upcoming" + ROUTE_V1_CAMPAIGN_FINISHED = "/api/v1/campaign/finished" + ROUTE_V1_CAMPAIGN_CLOSE = "/api/v1/campaign/:id/close" + ROUTE_V1_CAMPAIGN_EXPORT_EVENTS = "/api/v1/campaign/:id/export/events" + ROUTE_V1_CAMPAIGN_EXPORT_SUBMISSIONS = "/api/v1/campaign/:id/export/submissions" + ROUTE_V1_CAMPAIGN_ANONYMIZE = "/api/v1/campaign/:id/anonymize" + ROUTE_V1_CAMPAIGN_ID = "/api/v1/campaign/:id" + ROUTE_V1_CAMPAIGN_NAME = "/api/v1/campaign/name/:name" + ROUTE_V1_CAMPAIGN_RECIPIENTS = "/api/v1/campaign/:id/recipients" + ROUTE_V1_CAMPAIGN_RESULT_STATS = "/api/v1/campaign/:id/statistics" + ROUTE_V1_CAMPAIGN_EVENTS = "/api/v1/campaign/:id/events" + ROUTE_V1_CAMPAIGN_EVENT_NAMES = "/api/v1/campaign/event-types" + ROUTE_V1_CAMPAIGN_STATS = "/api/v1/campaign/statistics" + ROUTE_V1_CAMPAIGN_STATS_ID = "/api/v1/campaign/:id/stats" + ROUTE_V1_CAMPAIGN_STATS_ALL = "/api/v1/campaign/stats/all" + // campaign-recipient + ROUTE_V1_CAMPAIGN_RECIPIENT_EMAIL = "/api/v1/campaign/recipient/:id/email" + ROUTE_V1_CAMPAIGN_RECIPIENT_URL = "/api/v1/campaign/recipient/:id/url" + ROUTE_V1_CAMPAIGN_RECIPIENT_SET_SENT = "/api/v1/campaign/recipient/:id/sent" + // asset + ROUTE_V1_ASSET = "/api/v1/asset" + ROUTE_V1_ASSET_ID = "/api/v1/asset/:id" + ROUTE_V1_ASSET_DOMAIN_CONTEXT = "/api/v1/asset/domain/:domain" + ROUTE_V1_ASSET_GLOBAL_CONTEXT = "/api/v1/asset/domain/" + ROUTE_V1_ASSET_DOMAIN_VIEW = "/api/v1/asset/view/domain/:domain/*path" + // attachments + ROUTE_V1_ATTACHMENT = "/api/v1/attachment" + ROUTE_V1_ATTACHMENT_ID = "/api/v1/attachment/:id" + ROUTE_V1_ATTACHMENT_ID_CONTENT = "/api/v1/attachment/:id/content" + 
ROUTE_V1_ATTACHMENT_COMPANY_CONTEXT = "/api/v1/attachment/company/:companyID" + ROUTE_V1_ATTACHMENT_GLOBAL_CONTEXT = "/api/v1/attachment/company/" + ROUTE_V1_EMAIL_ATTACHMENT = "/api/v1/email/:id/attachment" + // api sender + ROUTE_V1_API_SENDER = "/api/v1/api-sender" + ROUTE_V1_API_SENDER_OVERVIEW = "/api/v1/api-sender/overview" + ROUTE_V1_API_SENDER_ID = "/api/v1/api-sender/:id" + ROUTE_V1_API_SENDER_ID_TEST = "/api/v1/api-sender/:id/test" + // deny allow + ROUTE_V1_ALLOW_DENY = "/api/v1/allow-deny" + ROUTE_V1_ALLOW_DENY_OVERVIEW = "/api/v1/allow-deny/overview" + ROUTE_V1_ALLOW_DENY_ID = "/api/v1/allow-deny/:id" + // web hooks + ROUTE_V1_WEBHOOK = "/api/v1/webhook" + ROUTE_V1_WEBHOOK_ID = "/api/v1/webhook/:id" + ROUTE_V1_WEBHOOK_ID_TEST = "/api/v1/webhook/:id/test" + // identifiers + ROUTE_V1_IDENTIFIER = "/api/v1/identifier" + // license + ROUTE_V1_LICENSE = "/api/v1/license" + // version + ROUTE_V1_VERSION = "/api/v1/version" + // import + ROUTE_V1_IMPORT = "/api/v1/import" +) + +// administrationServer is the administrationServer app +type administrationServer struct { + Server *http.Server + router *gin.Engine + logger *zap.SugaredLogger + production bool + embedBackendFS *embed.FS + certMagicConfig *certmagic.Config +} + +// NewAdministrationServer creates a new administration app +func NewAdministrationServer( + router *gin.Engine, + controllers *Controllers, + middlewares *Middlewares, + logger *zap.SugaredLogger, + certMagicConfig *certmagic.Config, + production bool, +) *administrationServer { + router = setupRoutes(router, controllers, middlewares) + + return &administrationServer{ + router: router, + logger: logger, + production: production, + certMagicConfig: certMagicConfig, + } +} + +func (a *administrationServer) Router() *gin.Engine { + return a.router +} + +// setupRoutes sets up the routes for the administration app +func setupRoutes( + r *gin.Engine, + controllers *Controllers, + middleware *Middlewares, +) *gin.Engine { + + if !build.Flags.Production { + r. + GET("/api/v1/_debug/panic", middleware.SessionHandler, controllers.Log.Panic). + GET("/api/v1/_debug/slow", middleware.SessionHandler, controllers.Log.Slow) + } + + r. + // log + GET(ROUTE_V1_LOG, middleware.SessionHandler, controllers.Log.GetLevel). + POST(ROUTE_V1_LOG, middleware.SessionHandler, controllers.Log.SetLevel). + GET(ROUTE_V1_LOG_TEST, middleware.SessionHandler, controllers.Log.TestLog). + // application + GET(ROUTE_V1_UPDATE_AVAILABLE, middleware.SessionHandler, controllers.Update.CheckForUpdate). + GET(ROUTE_V1_UPDATE_AVAILABLE_CACHED, middleware.SessionHandler, controllers.Update.CheckForUpdateCached). + // health + GET(ROUTE_V1_HEALTH, controllers.Health.Health). + GET(ROUTE_V1_LIVE, controllers.Health.Health). + GET(ROUTE_V1_READY, controllers.Health.Health). + // login, logout and session + GET(ROUTE_V1_SESSION_PING, middleware.SessionHandler, controllers.User.SessionPing). + POST(ROUTE_V1_USER_LOGIN, middleware.LoginRateLimiter, controllers.User.Login). + POST(ROUTE_V1_USER_LOGOUT, controllers.User.Logout). + // install + POST(ROUTE_V1_INSTALL, middleware.SessionHandler, controllers.Installer.Install). + // user + GET(ROUTE_V1_USER, middleware.SessionHandler, controllers.User.GetAll). + GET(ROUTE_V1_USER_ID, middleware.SessionHandler, controllers.User.GetByID). + POST(ROUTE_V1_USER_ID, middleware.SessionHandler, controllers.User.UpdateByID). + POST(ROUTE_V1_USER, middleware.SessionHandler, controllers.User.Create). + DELETE(ROUTE_V1_USER_ID, middleware.SessionHandler, controllers.User.Delete). 
+ POST(ROUTE_V1_USER_PASSWORD, middleware.SessionHandler, controllers.User.ChangePasswordOnLoggedInUser). + POST(ROUTE_V1_USER_USERNAME, middleware.SessionHandler, controllers.User.ChangeUsernameOnLoggedInUser). + POST(ROUTE_V1_USER_FULLNAME, middleware.SessionHandler, controllers.User.ChangeFullnameOnLoggedInUser). + POST(ROUTE_V1_USER_EMAIL, middleware.SessionHandler, controllers.User.ChangeEmailOnLoggedInUser). + GET(ROUTE_V1_USER_SESSIONS, middleware.SessionHandler, controllers.User.GetSessionsOnLoggedInUser). + POST(ROUTE_V1_USER_SESSIONS_INVALIDATE, middleware.SessionHandler, controllers.User.InvalidateAllSessionByUserID). + DELETE(ROUTE_V1_SESSION_ID, middleware.SessionHandler, controllers.User.ExpireSessionByID). + GET(ROUTE_V1_USER_API, middleware.SessionHandler, controllers.User.GetMaskedAPIKey). + POST(ROUTE_V1_USER_API, middleware.SessionHandler, controllers.User.UpsertAPIKey). + DELETE(ROUTE_V1_USER_API, middleware.SessionHandler, controllers.User.RemoveAPIKey). + // sso + GET(ROUTE_V1_SSO_ENTRA_ID_ENABLED, controllers.SSO.IsEnabled). + POST(ROUTE_V1_SSO_ENTRA_ID, middleware.SessionHandler, controllers.SSO.Upsert). + GET(ROUTE_V1_SSO_ENTRA_ID_LOGIN, controllers.SSO.EntreIDLogin). + GET(ROUTE_V1_SSO_ENTRA_ID_CALLBACK, controllers.SSO.EntreIDCallBack). + // user mfa + GET(ROUTE_V1_USER_MFA_TOTP, middleware.SessionHandler, controllers.User.IsTOTPEnabled). + POST(ROUTE_V1_USER_MFA_TOTP_SETUP, middleware.LoginRateLimiter, middleware.SessionHandler, controllers.User.SetupTOTP). + POST(ROUTE_V1_USER_MFA_TOTP_SETUP_VERIFY, middleware.LoginRateLimiter, middleware.SessionHandler, controllers.User.SetupVerifyTOTP). + POST(ROUTE_V1_USER_MFA_TOTP_VERIFY, middleware.LoginRateLimiter, middleware.SessionHandler, controllers.User.VerifyTOTP). + POST(ROUTE_V1_USER_MFA_TOTP, middleware.LoginRateLimiter, middleware.SessionHandler, controllers.User.DisableTOTP). + // qr + POST(ROUTE_V1_QR_FROM_TOTP, middleware.SessionHandler, controllers.QR.ToTOTPURL). + POST(ROUTE_V1_QR_URL_TO_HTML, middleware.SessionHandler, controllers.QR.ToHTML). + // company + POST(ROUTE_V1_COMPANY, middleware.SessionHandler, controllers.Company.Create). + POST(ROUTE_V1_COMPANY_ID, middleware.SessionHandler, controllers.Company.ChangeName). + GET(ROUTE_V1_COMPANY, middleware.SessionHandler, controllers.Company.GetAll). + GET(ROUTE_V1_COMPANY_ID_EXPORT, middleware.SessionHandler, controllers.Company.ExportByCompanyID). + GET(ROUTE_V1_COMPANY_ID_EXPORT_SHARED, middleware.SessionHandler, controllers.Company.ExportShared). + GET(ROUTE_V1_COMPANY_ID, middleware.SessionHandler, controllers.Company.GetByID). + DELETE(ROUTE_V1_COMPANY_ID, middleware.SessionHandler, controllers.Company.DeleteByID). + // options + GET(ROUTE_V1_OPTION_GET, middleware.SessionHandler, controllers.Option.Get). + POST(ROUTE_V1_OPTION, middleware.SessionHandler, middleware.SessionHandler, controllers.Option.Update). + // domain + GET(ROUTE_V1_DOMAIN, middleware.SessionHandler, controllers.Domain.GetAll). + GET(ROUTE_V1_DOMAIN_SUBSET, middleware.SessionHandler, controllers.Domain.GetAllOverview). + GET(ROUTE_V1_DOMAIN_ID, middleware.SessionHandler, controllers.Domain.GetByID). + GET(ROUTE_V1_DOMAIN_NAME, middleware.SessionHandler, controllers.Domain.GetByName). + POST(ROUTE_V1_DOMAIN, middleware.SessionHandler, controllers.Domain.Create). + POST(ROUTE_V1_DOMAIN_ID, middleware.SessionHandler, controllers.Domain.UpdateByID). + DELETE(ROUTE_V1_DOMAIN_ID, middleware.SessionHandler, controllers.Domain.DeleteByID). 
+ // recipient + GET(ROUTE_V1_RECIPIENT, middleware.SessionHandler, controllers.Recipient.GetAll). + GET(ROUTE_V1_RECIPIENT_ID, middleware.SessionHandler, controllers.Recipient.GetByID). + GET(ROUTE_V1_RECIPIENT_ID_EVENTS, middleware.SessionHandler, controllers.Recipient.GetCampaignEvents). + GET(ROUTE_V1_RECIPIENT_ID_STATS, middleware.SessionHandler, controllers.Recipient.GetStatsByID). + POST(ROUTE_V1_RECIPIENT, middleware.SessionHandler, controllers.Recipient.Create). + POST(ROUTE_V1_RECIPIENT_IMPORT, middleware.SessionHandler, controllers.Recipient.Import). + GET(ROUTE_V1_RECIPIENT_EXPORT, middleware.SessionHandler, controllers.Recipient.Export). + PATCH(ROUTE_V1_RECIPIENT_ID, middleware.SessionHandler, controllers.Recipient.UpdateByID). + DELETE(ROUTE_V1_RECIPIENT_ID, middleware.SessionHandler, controllers.Recipient.DeleteByID). + GET(ROUTE_V1_RECIPIENT_REPEAT_OFFENDERS, middleware.SessionHandler, controllers.Recipient.GetRepeatOffenderCount). + // recipient group + GET(ROUTE_V1_RECIPIENT_GROUP, middleware.SessionHandler, controllers.RecipientGroup.GetAll). + GET(ROUTE_V1_RECIPIENT_GROUP_ID, middleware.SessionHandler, controllers.RecipientGroup.GetByID). + GET(ROUTE_V1_RECIPIENT_GROUP_RECIPIENTS, middleware.SessionHandler, controllers.RecipientGroup.GetRecipientsByGroupID). + POST(ROUTE_V1_RECIPIENT_GROUP_RECIPIENTS, middleware.SessionHandler, controllers.RecipientGroup.AddRecipients). + DELETE(ROUTE_V1_RECIPIENT_GROUP_RECIPIENTS, middleware.SessionHandler, controllers.RecipientGroup.RemoveRecipients). + POST(ROUTE_V1_RECIPIENT_GROUP, middleware.SessionHandler, controllers.RecipientGroup.Create). + PATCH(ROUTE_V1_RECIPIENT_GROUP_ID, middleware.SessionHandler, controllers.RecipientGroup.UpdateByID). + PUT(ROUTE_V1_RECIPIENT_GROUP_ID_IMPORT, middleware.SessionHandler, controllers.RecipientGroup.Import). + DELETE(ROUTE_V1_RECIPIENT_GROUP_ID, middleware.SessionHandler, controllers.RecipientGroup.DeleteByID). + // page + GET(ROUTE_V1_PAGE, middleware.SessionHandler, controllers.Page.GetAll). + GET(ROUTE_V1_PAGE_OVERVIEW, middleware.SessionHandler, controllers.Page.GetOverview). + GET(ROUTE_V1_PAGE_ID, middleware.SessionHandler, controllers.Page.GetByID). + Any(ROUTE_V1_PAGE_CONTENT_ID, middleware.SessionHandler, controllers.Page.GetContentByID). + POST(ROUTE_V1_PAGE, middleware.SessionHandler, controllers.Page.Create). + PATCH(ROUTE_V1_PAGE_ID, middleware.SessionHandler, controllers.Page.UpdateByID). + DELETE(ROUTE_V1_PAGE_ID, middleware.SessionHandler, controllers.Page.DeleteByID). + // smtp configuration + GET(ROUTE_V1_SMTP_CONFIGURATION, middleware.SessionHandler, controllers.SMTPConfiguration.GetAll). + GET(ROUTE_V1_SMTP_CONFIGURATION_ID, middleware.SessionHandler, controllers.SMTPConfiguration.GetByID). + POST(ROUTE_V1_SMTP_CONFIGURATION, middleware.SessionHandler, controllers.SMTPConfiguration.Create). + POST(ROUTE_V1_SMTP_CONFIGURATION_ID_TEST_EMAIL, middleware.SessionHandler, controllers.SMTPConfiguration.TestEmail). + PATCH(ROUTE_V1_SMTP_CONFIGURATION_ID, middleware.SessionHandler, controllers.SMTPConfiguration.UpdateByID). + DELETE(ROUTE_V1_SMTP_CONFIGURATION_ID, middleware.SessionHandler, controllers.SMTPConfiguration.DeleteByID). + // smtp configuration headers + PATCH(ROUTE_V1_SMTP_CONFIGURATION_HEADERS, middleware.SessionHandler, controllers.SMTPConfiguration.AddHeader). + DELETE(ROUTE_V1_SMTP_HEADER_ID, middleware.SessionHandler, controllers.SMTPConfiguration.RemoveHeader). + // emails + GET(ROUTE_V1_EMAIL, middleware.SessionHandler, controllers.Email.GetAll). 
+ GET(ROUTE_V1_EMAIL_OVERVIEW, middleware.SessionHandler, controllers.Email.GetOverviews). + GET(ROUTE_V1_EMAIL_ID, middleware.SessionHandler, controllers.Email.GetByID). + GET(ROUTE_V1_EMAIL_CONTENT_ID, middleware.SessionHandler, controllers.Email.GetContentByID). + POST(ROUTE_V1_EMAIL_SEND_TEST, middleware.SessionHandler, controllers.Email.SendTestEmail). + POST(ROUTE_V1_EMAIL, middleware.SessionHandler, controllers.Email.Create). + // TODO PATCH + POST(ROUTE_V1_EMAIL_ID, middleware.SessionHandler, controllers.Email.UpdateByID). + DELETE(ROUTE_V1_EMAIL_ID, middleware.SessionHandler, controllers.Email.DeleteByID). + // email attachments + POST(ROUTE_V1_EMAIL_ATTACHMENT, middleware.SessionHandler, controllers.Email.AddAttachments). + DELETE(ROUTE_V1_EMAIL_ATTACHMENT, middleware.SessionHandler, controllers.Email.RemoveAttachment). + // campaign templates + GET(ROUTE_V1_CAMPAIGN_TEMPLATE, middleware.SessionHandler, controllers.CampaignTemplate.GetAll). + GET(ROUTE_v1_CAMPAIGN_TEMPLATE_ID, middleware.SessionHandler, controllers.CampaignTemplate.GetByID). + // TODO PATCH + POST(ROUTE_V1_CAMPAIGN_TEMPLATE, middleware.SessionHandler, controllers.CampaignTemplate.Create). + POST(ROUTE_v1_CAMPAIGN_TEMPLATE_ID, middleware.SessionHandler, controllers.CampaignTemplate.UpdateByID). + DELETE(ROUTE_v1_CAMPAIGN_TEMPLATE_ID, middleware.SessionHandler, controllers.CampaignTemplate.DeleteByID). + // campaigns + GET(ROUTE_V1_CAMPAIGN, middleware.SessionHandler, controllers.Campaign.GetAll). + GET(ROUTE_V1_CAMPAIGN_CALENDAR, middleware.SessionHandler, controllers.Campaign.GetAllWithinDates). + GET(ROUTE_V1_CAMPAIGN_ACTIVE, middleware.SessionHandler, controllers.Campaign.GetAllActive). + GET(ROUTE_V1_CAMPAIGN_UPCOMING, middleware.SessionHandler, controllers.Campaign.GetAllUpcoming). + GET(ROUTE_V1_CAMPAIGN_FINISHED, middleware.SessionHandler, controllers.Campaign.GetAllFinished). + GET(ROUTE_V1_CAMPAIGN_EVENT_NAMES, middleware.SessionHandler, controllers.Campaign.GetAllEventTypes). + GET(ROUTE_V1_CAMPAIGN_EVENTS, middleware.SessionHandler, controllers.Campaign.GetEventsByCampaignID). + GET(ROUTE_V1_CAMPAIGN_STATS, middleware.SessionHandler, controllers.Campaign.GetStats). + GET(ROUTE_V1_CAMPAIGN_RESULT_STATS, middleware.SessionHandler, controllers.Campaign.GetResultStats). + GET(ROUTE_V1_CAMPAIGN_STATS_ID, middleware.SessionHandler, controllers.Campaign.GetCampaignStats). + GET(ROUTE_V1_CAMPAIGN_STATS_ALL, middleware.SessionHandler, controllers.Campaign.GetAllCampaignStats). + GET(ROUTE_V1_CAMPAIGN_ID, middleware.SessionHandler, controllers.Campaign.GetByID). + GET(ROUTE_V1_CAMPAIGN_NAME, middleware.SessionHandler, controllers.Campaign.GetByName). + POST(ROUTE_V1_CAMPAIGN, middleware.SessionHandler, controllers.Campaign.Create). + // TODO PATCH + POST(ROUTE_V1_CAMPAIGN_ID, middleware.SessionHandler, controllers.Campaign.UpdateByID). + POST(ROUTE_V1_CAMPAIGN_CLOSE, middleware.SessionHandler, controllers.Campaign.CloseCampaignByID). + GET(ROUTE_V1_CAMPAIGN_EXPORT_EVENTS, middleware.SessionHandler, controllers.Campaign.ExportEventsAsCSV). + GET(ROUTE_V1_CAMPAIGN_EXPORT_SUBMISSIONS, middleware.SessionHandler, controllers.Campaign.ExportSubmissionsAsCSV). + POST(ROUTE_V1_CAMPAIGN_ANONYMIZE, middleware.SessionHandler, controllers.Campaign.AnonymizeByID). + DELETE(ROUTE_V1_CAMPAIGN_ID, middleware.SessionHandler, controllers.Campaign.DeleteByID). + // campaign-recipient + GET(ROUTE_V1_CAMPAIGN_RECIPIENTS, middleware.SessionHandler, controllers.Campaign.GetRecipientsByCampaignID). 
+ GET(ROUTE_V1_CAMPAIGN_RECIPIENT_EMAIL, middleware.SessionHandler, controllers.Campaign.GetCampaignEmail). + GET(ROUTE_V1_CAMPAIGN_RECIPIENT_URL, middleware.SessionHandler, controllers.Campaign.GetCampaignURL). + POST(ROUTE_V1_CAMPAIGN_RECIPIENT_SET_SENT, middleware.SessionHandler, controllers.Campaign.SetSentAtByCampaignRecipientID). + // asset + GET(ROUTE_V1_ASSET_DOMAIN_VIEW, middleware.SessionHandler, controllers.Asset.GetContentByID). + GET(ROUTE_V1_ASSET_ID, middleware.SessionHandler, controllers.Asset.GetByID). + PATCH(ROUTE_V1_ASSET_ID, middleware.SessionHandler, controllers.Asset.UpdateByID). + GET(ROUTE_V1_ASSET_DOMAIN_CONTEXT, middleware.SessionHandler, controllers.Asset.GetAllForContext). + GET(ROUTE_V1_ASSET_GLOBAL_CONTEXT, middleware.SessionHandler, controllers.Asset.GetAllForContext). + POST(ROUTE_V1_ASSET, middleware.SessionHandler, controllers.Asset.Create). + DELETE(ROUTE_V1_ASSET_ID, middleware.SessionHandler, controllers.Asset.RemoveByID). + // attachments + POST(ROUTE_V1_ATTACHMENT, middleware.SessionHandler, controllers.Attachment.Create). + GET(ROUTE_V1_ATTACHMENT_ID, middleware.SessionHandler, controllers.Attachment.GetByID). + GET(ROUTE_V1_ATTACHMENT_ID_CONTENT, middleware.SessionHandler, controllers.Attachment.GetContentByID). + GET(ROUTE_V1_ATTACHMENT, middleware.SessionHandler, controllers.Attachment.GetAllForContext). + PATCH(ROUTE_V1_ATTACHMENT_ID, middleware.SessionHandler, controllers.Attachment.UpdateByID). + DELETE(ROUTE_V1_ATTACHMENT_ID, middleware.SessionHandler, controllers.Attachment.RemoveByID). + // api sender + GET(ROUTE_V1_API_SENDER, middleware.SessionHandler, controllers.APISender.GetAll). + GET(ROUTE_V1_API_SENDER_OVERVIEW, middleware.SessionHandler, controllers.APISender.GetAllOverview). + GET(ROUTE_V1_API_SENDER_ID, middleware.SessionHandler, controllers.APISender.GetByID). + POST(ROUTE_V1_API_SENDER, middleware.SessionHandler, controllers.APISender.Create). + PATCH(ROUTE_V1_API_SENDER_ID, middleware.SessionHandler, controllers.APISender.UpdateByID). + POST(ROUTE_V1_API_SENDER_ID_TEST, middleware.SessionHandler, controllers.APISender.SendTest). + DELETE(ROUTE_V1_API_SENDER_ID, middleware.SessionHandler, controllers.APISender.DeleteByID). + // allow deny + GET(ROUTE_V1_ALLOW_DENY, middleware.SessionHandler, controllers.AllowDeny.GetAll). + GET(ROUTE_V1_ALLOW_DENY_OVERVIEW, middleware.SessionHandler, controllers.AllowDeny.GetAllOverview). + GET(ROUTE_V1_ALLOW_DENY_ID, middleware.SessionHandler, controllers.AllowDeny.GetByID). + POST(ROUTE_V1_ALLOW_DENY, middleware.SessionHandler, controllers.AllowDeny.Create). + PATCH(ROUTE_V1_ALLOW_DENY_ID, middleware.SessionHandler, controllers.AllowDeny.UpdateByID). + DELETE(ROUTE_V1_ALLOW_DENY_ID, middleware.SessionHandler, controllers.AllowDeny.DeleteByID). + // web hooks + GET(ROUTE_V1_WEBHOOK, middleware.SessionHandler, controllers.Webhook.GetAll). + GET(ROUTE_V1_WEBHOOK_ID, middleware.SessionHandler, controllers.Webhook.GetByID). + POST(ROUTE_V1_WEBHOOK, middleware.SessionHandler, controllers.Webhook.Create). + PATCH(ROUTE_V1_WEBHOOK_ID, middleware.SessionHandler, controllers.Webhook.UpdateByID). + DELETE(ROUTE_V1_WEBHOOK_ID, middleware.SessionHandler, controllers.Webhook.DeleteByID). + POST(ROUTE_V1_WEBHOOK_ID_TEST, middleware.SessionHandler, controllers.Webhook.SendTest). + // identifiers + GET(ROUTE_V1_IDENTIFIER, middleware.SessionHandler, controllers.Identifier.GetAll). + // version + GET(ROUTE_V1_VERSION, middleware.SessionHandler, controllers.Version.Get). 
+ // update + GET(ROUTE_V1_UPDATE, middleware.SessionHandler, controllers.Update.GetUpdateDetails). + POST(ROUTE_V1_UPDATE, middleware.SessionHandler, controllers.Update.RunUpdate). + // import + POST(ROUTE_V1_IMPORT, middleware.SessionHandler, controllers.Import.Import) + + return r +} + +func (a *administrationServer) handleTLSCertificate( + conf *config.Config, +) error { + publicCertExists := true + privateCertExists := true + if _, err := os.Stat(conf.TLSCertPath()); err != nil { + if !os.IsNotExist(err) { + return err + } + privateCertExists = false + } + if _, err := os.Stat(conf.TLSKeyPath()); err != nil { + if !os.IsNotExist(err) { + return err + } + publicCertExists = false + } + + // determine hostnames to include in the certificate + hostnames := []string{} + if h := conf.TLSHost(); len(h) > 0 { + hostnames = append(hostnames, h) + } + // get the address from config + if conf.AdminNetAddress() != "" { + host, _, err := net.SplitHostPort(conf.AdminNetAddress()) + if err == nil && host != "" && host != "0.0.0.0" && host != "::" { + hostnames = append(hostnames, host) + } + } + + // try to get all non-loopback IP addresses + addrs, err := net.InterfaceAddrs() + if err == nil { + for _, addr := range addrs { + if ipnet, ok := addr.(*net.IPNet); ok && !ipnet.IP.IsLoopback() { + ip := ipnet.IP + + // skip private IPs (RFC 1918) + if ip.IsPrivate() || ip.IsLoopback() || ip.IsLinkLocalUnicast() || ip.IsLinkLocalMulticast() { + continue + } + + // only add public IPs to the certificate + hostnames = append(hostnames, ip.String()) + } + } + } + + needToCreateCert := !privateCertExists || !publicCertExists + + // check if we need to recreate the certificate because host/IP has changed + if privateCertExists && publicCertExists { + // read the existing certificate to check the hostnames + certData, err := os.ReadFile(conf.TLSCertPath()) + if err == nil { + block, _ := pem.Decode(certData) + if block != nil && block.Type == "CERTIFICATE" { + cert, err := x509.ParseCertificate(block.Bytes) + if err == nil { + // vheck if all requested hostnames are in the certificate + missingHosts := false + hostMap := make(map[string]bool) + + // add all current certificate SANs to the map + for _, dnsName := range cert.DNSNames { + hostMap[dnsName] = true + } + for _, ip := range cert.IPAddresses { + hostMap[ip.String()] = true + } + + // check if the common name is in our hostnames + if cert.Subject.CommonName != "" { + hostMap[cert.Subject.CommonName] = true + } + + // Check if all requested hostnames are covered + for _, host := range hostnames { + if !hostMap[host] { + missingHosts = true + a.logger.Debugw("host not found in existing certificate", "host", host) + break + } + } + + // if the TLSHost is specified and not in the certificate, or other hosts are missing, regenerate + if missingHosts { + a.logger.Debug("recreating certificate due to changed host/IP configuration") + needToCreateCert = true + } + } else { + a.logger.Warnw("could not parse existing certificate, will recreate", "error", err) + needToCreateCert = true + } + } else { + a.logger.Warn("invalid certificate format, will recreate") + needToCreateCert = true + } + } else { + a.logger.Warnw("could not read existing certificate, will recreate", "error", err) + needToCreateCert = true + } + } + + // create certificates if needed + if needToCreateCert { + a.logger.Debug("creating self signed certificate for administration server") + + info := acme.NewInformationWithDefault() + if len(hostnames) > 0 { + info.CommonName = hostnames[0] + } + 
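// [editor's note] The block above decides whether to regenerate the self-signed
// certificate by checking that every requested hostname is covered by the existing
// certificate's DNS SANs, IP SANs, or common name. A self-contained sketch of that
// coverage check using only the standard library; certCoversHosts is a hypothetical
// helper name, not part of this patch:
//
//	package main
//
//	import (
//		"crypto/x509"
//		"encoding/pem"
//		"fmt"
//		"os"
//	)
//
//	// certCoversHosts reports whether every host appears in the certificate's
//	// DNS names, IP SANs, or common name.
//	func certCoversHosts(pemPath string, hosts []string) (bool, error) {
//		data, err := os.ReadFile(pemPath)
//		if err != nil {
//			return false, err
//		}
//		block, _ := pem.Decode(data)
//		if block == nil || block.Type != "CERTIFICATE" {
//			return false, fmt.Errorf("no certificate found in %s", pemPath)
//		}
//		cert, err := x509.ParseCertificate(block.Bytes)
//		if err != nil {
//			return false, err
//		}
//		covered := map[string]bool{cert.Subject.CommonName: true}
//		for _, name := range cert.DNSNames {
//			covered[name] = true
//		}
//		for _, ip := range cert.IPAddresses {
//			covered[ip.String()] = true
//		}
//		for _, host := range hosts {
//			if !covered[host] {
//				return false, nil
//			}
//		}
//		return true, nil
//	}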
+ a.logger.Debugw("generating certificate with hostnames", "hostnames", hostnames) + + err = acme.CreateSelfSignedCert( + a.logger, + info, + hostnames, + conf.TLSCertPath(), + conf.TLSKeyPath(), + ) + + if err != nil { + return fmt.Errorf("failed to create self signed certificate: %s", err) + } + + a.logger.Debugw( + "saved self signed certificate for administration servers", + "TLS certificate", conf.TLSCertPath(), + "TLS key path", conf.TLSKeyPath(), + ) + } else { + a.logger.Debug("using existing certificate for administration server") + } + + return nil +} + +// LoadFrontend loads the frontend +// if this is a production build, the fronten will be embedded +// else the routes will be setup to load the frontend resources on every request +func (a *administrationServer) LoadFrontend( + ln net.Listener, +) error { + if build.Flags.Production { + return a.loadEmbeddedFileSystem( + ln, + ) + } + return a.loadPerRequestLoading() +} + +// loadPerRequestLoading loads the frontend resources on every request +// this is only used in a dev enviroment using nodemon as is a +// backup if the current vite proxy stragegy does not work. +func (a *administrationServer) loadPerRequestLoading() error { + a.router.GET("/", func(c *gin.Context) { + c.File("./frontend/website/build/index.html") + }) + // perform manual lookup for the frontend files on each request + // build files might have been added or removed, so each request must + // do a check if the file exists + a.router.NoRoute(func(c *gin.Context) { + // a.logger.Infow("serving frontend file", "path", c.Request.URL.Path) + // check if the request url path exists in the root directory + if _, err := os.Stat("./frontend/website/build" + c.Request.URL.Path); err == nil { + c.File("./frontend/website/build" + c.Request.URL.Path) + return + } + // if the path ends with / or does not have a file extension, then it should fallback to index.html as + // it is a SPA path such as /company/foo/ + if c.Request.URL.Path[len(c.Request.URL.Path)-1:] == "/" || !strings.Contains(c.Request.URL.Path, ".") { + c.File("./frontend/website/build/index.html") + } + // file not found - return 404 + c.AbortWithStatus(http.StatusNotFound) + }) + return nil +} + +func (a *administrationServer) loadEmbeddedFileSystem( + ln net.Listener, +) error { + _ = ln + embedFS := frontend.GetEmbededFS() + // make embedded .html work + frontend.LoadHTMLFromEmbedFS(a.router, *embedFS, "build/*.html") + rootDir, err := embedFS.ReadDir("build") + if err != nil { + return errs.Wrap(err) + } + for _, entry := range rootDir { + path := entry.Name() + // add root files + if !entry.IsDir() { + // special case for the frontpage + if path == "index.html" { + a.router.GET("/", func(c *gin.Context) { + c.HTML(http.StatusOK, "build/index.html", nil) + }) + continue + } + // any file in the root folder gets server as a file + a.router.GET("/"+path, func(c *gin.Context) { + c.FileFromFS("build/"+path, http.FS(*embedFS)) + }) + continue + } + // add static folders + staticFS, err := fs.Sub(embedFS, "build/"+path) + if err != nil { + return errs.Wrap(err) + } + switch path { + case ".well-known": + fallthrough + case "_app": + a.router.StaticFS(path, http.FS(staticFS)) + } + } + // fall back to the root index.html + a.router.NoRoute(func(c *gin.Context) { + c.HTML(http.StatusOK, "build/index.html", nil) + }) + + return nil +} + +func (a *administrationServer) StartServer( + conf *config.Config, +) (chan server.StartupMessage, net.Listener, error) { + startupMessage := server.NewStartupMessageChannel() 
+ ln, err := net.Listen("tcp", conf.AdminNetAddress()) + if err != nil { + return nil, nil, fmt.Errorf("failed to listen on %s due to: %s", conf.AdminNetAddress(), err) + } + err = a.LoadFrontend(ln) + if err != nil { + return nil, nil, errs.Wrap(err) + } + err = a.handleTLSCertificate(conf) + if err != nil { + return nil, nil, errs.Wrap(err) + } + + a.Server = &http.Server{ + Handler: a.router, + // The maximum duration for reading the entire request, including the request line, headers, and body + ReadTimeout: 15 * time.Second, + // The maximum duration for writing the entire response, including the response headers and body + WriteTimeout: 15 * time.Second, // Timeout for writing the response + // The maximum duration to wait for the next request when the connection is in the idle state + IdleTimeout: 10 * time.Second, + // The maximum duration for reading the request headers. + ReadHeaderTimeout: 2 * time.Second, + // Maximum size of request headers (512 KB) + MaxHeaderBytes: 1 << 19, + } + a.Server.ErrorLog = log.New( + &SkipFirstTlsToZapWriter{ + logger: a.logger, + serverPtr: a.Server, + }, "", 0, + ) + + a.logger.Debugw("TLS settings", + "certPath", conf.TLSCertPath(), + "certKeyPath", conf.TLSKeyPath(), + ) + + // start the administration server + adminHost := "admin.test" + err = a.certMagicConfig.ManageSync(context.Background(), []string{adminHost}) + if err != nil { + a.logger.Errorw("certmagic managesync failed", "error", err) + return nil, nil, errs.Wrap(err) + } + go func() { + if !conf.TLSAuto() { + a.logger.Debugw("starting administration", + "address", ln.Addr().String(), + ) + err := a.Server.ServeTLS( + ln, + conf.TLSCertPath(), + conf.TLSKeyPath(), + ) + if err != nil && err != http.ErrServerClosed { + log.Fatalf("failed to start administration server due to: %s", err) + } + } else { + // Setup TLS config from CertMagic + tlsConfig := a.certMagicConfig.TLSConfig() + tlsConfig.NextProtos = append([]string{"h2", "http/1.1"}, tlsConfig.NextProtos...) + + // Create new TLS listener with the config + tlsLn := tls.NewListener(ln, tlsConfig) + a.logger.Debugw("starting administration with automatic TLS", + "address", ln.Addr().String(), + "domain", adminHost, + ) + err := a.Server.Serve(tlsLn) + if err != nil && err != http.ErrServerClosed { + log.Fatalf("failed to start administration server due to: %s", err) + } + } + }() + + // test the connection to the administration server + // and send a startup message + // TODO the connectivity check has been disabled as it fucks up the auto tls + // as it calls the certmagic DecisionFunc from addreses such as ::1 and I am not + // sure we it is safe to allow list all of them or if I know all of the potential addresses. 
+ /* + go func() { + a.logger.Debug("testing connectivity to administration server...") + // wait for connection to the server + attempts := 1 + for { + dialer := &net.Dialer{ + Timeout: time.Second, + KeepAlive: time.Second, + } + conn, err := tls.DialWithDialer( + dialer, + "tcp", + ln.Addr().String(), + &tls.Config{ + InsecureSkipVerify: true, + }, + ) + if err != nil { + a.logger.Debugw("failed to connect to administration server", + "attempt", attempts, + ) + time.Sleep(1 * time.Second) + if attempts == 3 { + startupMessage <- server.NewStartupMessage( + false, + fmt.Errorf("failed to connect to administration server"), + ) + break + } + attempts += 1 + continue + } + conn.Close() + startupMessage <- server.NewStartupMessage(true, nil) + break + } + }() + */ + startupMessage <- server.NewStartupMessage(true, nil) + + return startupMessage, ln, nil +} + +// https://stackoverflow.com/questions/52294334/net-http-set-custom-logger +type fwdToZapWriter struct { + logger *zap.SugaredLogger +} + +func (fw *fwdToZapWriter) Write(p []byte) (n int, err error) { + fw.logger.Errorw(string(p)) + return len(p), nil +} + +// SkipFirstTlsToZapWriter is a weird Writer that replaces itself +// when it has seen a TLS handshake error it is used for handling +// a special annoying case where a health check on startup creates +// a tls handshake that we want to ignore +type SkipFirstTlsToZapWriter struct { + logger *zap.SugaredLogger + // ignore first tls + serverPtr *http.Server +} + +func (fw *SkipFirstTlsToZapWriter) Write(p []byte) (n int, err error) { + if strings.Contains(string(p), "TLS handshake error") { + // After catching the first TLS error, replace the ErrorLog with direct logger + fw.serverPtr.ErrorLog = log.New( + &fwdToZapWriter{ + logger: fw.logger, + }, + "", + 0, + ) + return len(p), nil + } + fw.logger.Errorw(string(p)) + return len(p), nil +} diff --git a/backend/app/config.go b/backend/app/config.go new file mode 100644 index 0000000..9eb6274 --- /dev/null +++ b/backend/app/config.go @@ -0,0 +1,41 @@ +package app + +import ( + "fmt" + "io/fs" + "os" + "path/filepath" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/errs" +) + +// SetupConfig sets up the config +func SetupConfig( + enviroment string, + configFilePath string, +) (*config.Config, error) { + configFolder, configFile := filepath.Split(configFilePath) + filesystem := os.DirFS(configFolder) + configDTO, err := config.NewDTOFromFile(filesystem, configFile) + if err != nil && !errors.Is(err, fs.ErrNotExist) { + return nil, errs.Wrap(err) + } + if errors.Is(err, fs.ErrNotExist) { + fmt.Printf(" * No config loaded. 
Creating default config file at %s\n\n", configFilePath) + var conf *config.Config + if enviroment == MODE_DEVELOPMENT { + conf = config.NewDevDefaultConfig() + } else { + conf = config.NewProductionDefaultConfig() + } + err = conf.WriteToFile(configFilePath) + configDTO = conf.ToDTO() + if err != nil { + return nil, errs.Wrap(err) + } + } + return config.FromDTO(configDTO) +} diff --git a/backend/app/controllers.go b/backend/app/controllers.go new file mode 100644 index 0000000..33d59b3 --- /dev/null +++ b/backend/app/controllers.go @@ -0,0 +1,200 @@ +package app + +import ( + "github.com/phishingclub/phishingclub/controller" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// Controllers is a collection of controllers +type Controllers struct { + Asset *controller.Asset + Attachment *controller.Attachment + Company *controller.Company + Health *controller.Health + Installer *controller.Install + InitialSetup *controller.InitialSetup + Page *controller.Page + Log *controller.Log + Option *controller.Option + User *controller.User + Domain *controller.Domain + Recipient *controller.Recipient + RecipientGroup *controller.RecipientGroup + SMTPConfiguration *controller.SMTPConfiguration + Email *controller.Email + CampaignTemplate *controller.CampaignTemplate + Campaign *controller.Campaign + QR *controller.QRGenerator + APISender *controller.APISender + AllowDeny *controller.AllowDeny + Webhook *controller.Webhook + Identifier *controller.Identifier + Version *controller.Version + SSO *controller.SSO + Update *controller.Update + Import *controller.Import +} + +// NewControllers creates a collection of controllers +func NewControllers( + staticAssetPath string, + attachmentsPath string, + repositories *Repositories, + services *Services, + logger *zap.SugaredLogger, + atomLogger *zap.AtomicLevel, + utillities *Utilities, + db *gorm.DB, +) *Controllers { + common := controller.Common{ + SessionService: services.Session, + Logger: logger, + Response: utillities.JSONResponseHandler, + } + asset := &controller.Asset{ + Common: common, + StaticAssetPath: staticAssetPath, + AssetService: services.Asset, + OptionService: services.Option, + DomainService: services.Domain, + } + attachment := &controller.Attachment{ + Common: common, + StaticAttachmentPath: attachmentsPath, + AttachmentService: services.Attachment, + OptionService: services.Option, + TemplateService: services.Template, + CompanyService: services.Company, + } + company := &controller.Company{ + Common: common, + CampaignService: services.Campaign, + CompanyService: services.Company, + RecipientService: services.Recipient, + } + initialSetup := &controller.InitialSetup{ + Common: common, + CLIOutputter: utillities.CLIOutputter, + OptionRepository: repositories.Option, + InstallService: services.InstallSetup, + OptionService: services.Option, + } + installer := &controller.Install{ + Common: common, + UserRepository: repositories.User, + CompanyRepository: repositories.Company, + OptionRepository: repositories.Option, + PasswordHasher: *utillities.PasswordHasher, + DB: db, + } + health := &controller.Health{} + log := &controller.Log{ + Common: common, + OptionService: services.Option, + Database: db, + LoggerAtom: atomLogger, + } + page := &controller.Page{ + Common: common, + PageService: services.Page, + TemplateService: services.Template, + } + option := &controller.Option{ + Common: common, + OptionService: services.Option, + } + user := &controller.User{ + Common: common, + UserService: services.User, + } + domain := &controller.Domain{ 
+ Common: common, + DomainService: services.Domain, + } + recipient := &controller.Recipient{ + Common: common, + RecipientService: services.Recipient, + } + recipientGroup := &controller.RecipientGroup{ + Common: common, + RecipientGroupService: services.RecipientGroup, + } + smtpConfiguration := &controller.SMTPConfiguration{ + Common: common, + SMTPConfigurationService: services.SMTPConfiguration, + } + email := &controller.Email{ + Common: common, + EmailService: services.Email, + TemplateService: services.Template, + EmailRepository: repositories.Email, + } + campaignTemplate := &controller.CampaignTemplate{ + Common: common, + CampaignTemplateService: services.CampaignTemplate, + } + campaign := &controller.Campaign{ + Common: common, + CampaignService: services.Campaign, + } + qr := &controller.QRGenerator{ + Common: common, + } + apiSender := &controller.APISender{ + Common: common, + APISenderService: services.APISender, + } + allowDeny := &controller.AllowDeny{ + Common: common, + AllowDenyService: services.AllowDeny, + } + webhook := &controller.Webhook{ + Common: common, + WebhookService: services.Webhook, + } + identifier := &controller.Identifier{ + Common: common, + IdentifierService: services.Identifier, + } + version := &controller.Version{Common: common} + sso := &controller.SSO{Common: common, SSO: services.SSO} + update := &controller.Update{ + Common: common, + UpdateService: services.Update, + OptionService: services.Option, + } + importController := &controller.Import{ + Common: common, + ImportService: services.Import, + } + + return &Controllers{ + Asset: asset, + Attachment: attachment, + Company: company, + Installer: installer, + InitialSetup: initialSetup, + Health: health, + Page: page, + Log: log, + Option: option, + User: user, + Domain: domain, + Recipient: recipient, + RecipientGroup: recipientGroup, + SMTPConfiguration: smtpConfiguration, + Email: email, + CampaignTemplate: campaignTemplate, + Campaign: campaign, + QR: qr, + APISender: apiSender, + AllowDeny: allowDeny, + Webhook: webhook, + Identifier: identifier, + Version: version, + SSO: sso, + Update: update, + Import: importController, + } +} diff --git a/backend/app/database.go b/backend/app/database.go new file mode 100644 index 0000000..97502d4 --- /dev/null +++ b/backend/app/database.go @@ -0,0 +1,16 @@ +package app + +import ( + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/database" + "gorm.io/gorm" +) + +// SetupDatabase sets up the database +// this includes creating the database connection +func SetupDatabase( + conf *config.Config, +) (*gorm.DB, error) { + // create db connection + return database.FromConfig(*conf) +} diff --git a/backend/app/logger.go b/backend/app/logger.go new file mode 100644 index 0000000..0ff7890 --- /dev/null +++ b/backend/app/logger.go @@ -0,0 +1,76 @@ +package app + +import ( + "github.com/go-errors/errors" + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/log" + "github.com/phishingclub/phishingclub/version" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +const ( + MODE_INTEGRATION_TEST = "integration_test" + MODE_DEVELOPMENT = "development" + MODE_PRODUCTION = "production" +) + +func createCore(core zapcore.Core) zapcore.Core { + return &stackCore{core} +} + +type stackCore struct { + zapcore.Core +} + +func (c *stackCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry { + // dont add our core again if it's already been added + if ce != nil { + return ce 
+ } + return ce.AddCore(ent, c) +} + +func (c *stackCore) Write(ent zapcore.Entry, fields []zapcore.Field) error { + // return c.Core.Write(ent, fields) + // look for error field and enhance the message with stack trace + for _, field := range fields { + if field.Key == "error" { + if err, ok := field.Interface.(error); ok { + if goErr, ok := err.(*errors.Error); ok { + ent.Stack = goErr.ErrorStack() + } + } + } + } + return c.Core.Write(ent, fields) +} + +func SetupLogger(loggerType string, conf *config.Config) (*zap.SugaredLogger, *zap.AtomicLevel, error) { + var logger *zap.Logger + var loggerAtom *zap.AtomicLevel + var err error + + switch loggerType { + case MODE_DEVELOPMENT: + logger, loggerAtom, err = log.NewDevelopmentLogger(conf) + case MODE_INTEGRATION_TEST: + fallthrough + case MODE_PRODUCTION: + fallthrough + default: + logger, loggerAtom, err = log.NewProductionLogger(conf) + } + + if err != nil { + return nil, nil, err + } + + // Create new logger with custom core + logger = zap.New(createCore(logger.Core())) + sgr := logger.Sugar() + if loggerType == MODE_PRODUCTION { + sgr = sgr.With("v-debug", version.Get()) + } + return sgr, loggerAtom, nil +} diff --git a/backend/app/middleware.go b/backend/app/middleware.go new file mode 100644 index 0000000..b6add01 --- /dev/null +++ b/backend/app/middleware.go @@ -0,0 +1,43 @@ +package app + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/middleware" + "go.uber.org/zap" +) + +// Middlwares is a collection of middlewares +type Middlewares struct { + IPLimiter gin.HandlerFunc + LoginRateLimiter gin.HandlerFunc + SessionHandler gin.HandlerFunc +} + +// NewMiddlewares creates a collection of middlewares +func NewMiddlewares( + requestPerSecond float64, + requestBurst int, + conf *config.Config, + services *Services, + utils *Utilities, + logger *zap.SugaredLogger, +) *Middlewares { + ipLimiter := middleware.NewAllowIPMiddleware(conf, logger) + loginThrottle := middleware.NewIPRateLimiterMiddleware( + requestPerSecond, // requests per second + requestBurst, // burst + ) + sessionHandler := middleware.NewSessionHandler( + services.Session, + services.User, + utils.JSONResponseHandler, + logger, + ) + + return &Middlewares{ + IPLimiter: ipLimiter, + LoginRateLimiter: loginThrottle, + SessionHandler: sessionHandler, + } +} diff --git a/backend/app/repositories.go b/backend/app/repositories.go new file mode 100644 index 0000000..a4e40a6 --- /dev/null +++ b/backend/app/repositories.go @@ -0,0 +1,59 @@ +package app + +import ( + "github.com/phishingclub/phishingclub/repository" + "gorm.io/gorm" +) + +// Repositories is a collection of repositories +type Repositories struct { + Asset *repository.Asset + Attachment *repository.Attachment + Company *repository.Company + Option *repository.Option + Page *repository.Page + Role *repository.Role + Session *repository.Session + User *repository.User + Domain *repository.Domain + Recipient *repository.Recipient + RecipientGroup *repository.RecipientGroup + SMTPConfiguration *repository.SMTPConfiguration + Email *repository.Email + Campaign *repository.Campaign + CampaignRecipient *repository.CampaignRecipient + CampaignTemplate *repository.CampaignTemplate + APISender *repository.APISender + AllowDeny *repository.AllowDeny + Webhook *repository.Webhook + Identifier *repository.Identifier +} + +// NewRepositories creates a collection of repositories +func NewRepositories( + db *gorm.DB, +) *Repositories { + option := 
&repository.Option{DB: db} + return &Repositories{ + Asset: &repository.Asset{DB: db}, + Attachment: &repository.Attachment{DB: db}, + Company: &repository.Company{DB: db}, + Option: option, + Page: &repository.Page{DB: db}, + Role: &repository.Role{DB: db}, + Session: &repository.Session{DB: db}, + User: &repository.User{DB: db}, + Domain: &repository.Domain{DB: db}, + Recipient: &repository.Recipient{DB: db, OptionRepository: option}, + RecipientGroup: &repository.RecipientGroup{DB: db}, + SMTPConfiguration: &repository.SMTPConfiguration{DB: db}, + Email: &repository.Email{DB: db}, + Campaign: &repository.Campaign{DB: db}, + CampaignRecipient: &repository.CampaignRecipient{DB: db}, + CampaignTemplate: &repository.CampaignTemplate{DB: db}, + APISender: &repository.APISender{DB: db}, + AllowDeny: &repository.AllowDeny{DB: db}, + Webhook: &repository.Webhook{DB: db}, + Identifier: &repository.Identifier{DB: db}, + } +} diff --git a/backend/app/server.go b/backend/app/server.go new file mode 100644 index 0000000..aca0aed --- /dev/null +++ b/backend/app/server.go @@ -0,0 +1,1182 @@ +package app + +import ( + "bytes" + "context" + "crypto/tls" + "fmt" + "log" + "mime" + "net" + "net/http" + "os" + "path/filepath" + "strings" + "text/template" + "time" + + "github.com/go-errors/errors" + + "github.com/caddyserver/certmagic" + securejoin "github.com/cyphar/filepath-securejoin" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/server" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" + "go.uber.org/zap" + "gorm.io/gorm" +) + +const MAX_USER_AGENT_SAVED = 255 + +// Server is the public phishing server +type Server struct { + HTTPServer *http.Server + HTTPSServer *http.Server + db *gorm.DB + logger *zap.SugaredLogger + certMagicConfig *certmagic.Config + staticPath string + ownManagedTLSCertPath string + controllers *Controllers + services *Services + repositories *Repositories +} + +// NewServer returns a new server +func NewServer( + staticPath string, + ownManagedTLSCertPath string, + db *gorm.DB, + controllers *Controllers, + services *Services, + repositories *Repositories, + logger *zap.SugaredLogger, + certMagicConfig *certmagic.Config, +) *Server { + return &Server{ + staticPath: staticPath, + ownManagedTLSCertPath: ownManagedTLSCertPath, + db: db, + controllers: controllers, + services: services, + repositories: repositories, + logger: logger, + certMagicConfig: certMagicConfig, + } +} + +// defaultServer creates a new default HTTP server +// skipFirstTLS sets a writer that ignores the first TLS handshake error and then +// replaces the logger with the normal logger, this is a hack to fix a annoying output +// created from the port ready probing done while booting the application +func (s *Server) defaultServer(handler http.Handler, skipFirstTLS bool) *http.Server { + server := &http.Server{ + Handler: handler, + // The maximum duration for reading the entire request, including the request line, headers, and body + ReadTimeout: 15 * time.Second, + // The maximum duration for writing the entire response, including the response 
headers and body + WriteTimeout: 15 * time.Second, // Timeout for writing the response + // The maximum duration to wait for the next request when the connection is in the idle state + IdleTimeout: 10 * time.Second, + // The maximum duration for reading the request headers. + ReadHeaderTimeout: 2 * time.Second, + // Maximum size of request headers (512 KB) + MaxHeaderBytes: 1 << 19, + ErrorLog: log.New(&fwdToZapWriter{logger: s.logger}, "", 0), + } + if skipFirstTLS { + server.ErrorLog = log.New( + &SkipFirstTlsToZapWriter{ + logger: s.logger, + serverPtr: server, + }, "", 0, + ) + } + return server +} + +// host extract the host part of the request +func (s *Server) getHostOnly(host string) (string, error) { + if strings.Contains(host, ":") { + hostOnly, _, err := net.SplitHostPort(host) + if err != nil { + return "", errs.Wrap(err) + } + return hostOnly, nil + } + return host, nil +} + +// testConnection tests the connection to the server +// it starts a gorutine that attempts to connect via. tcp 3 times and +// it returns a channel that will be called with the result +func (s *Server) testTCPConnection(identifier string, addr string) chan server.StartupMessage { + c := server.NewStartupMessageChannel() + go func() { + s.logger.Debugw("testing connection", + "server", identifier, + ) + attempts := 1 + for { + dialer := &net.Dialer{ + Timeout: time.Second, + KeepAlive: time.Second, + } + conn, err := dialer.Dial("tcp", addr) + if err != nil { + s.logger.Debugw( + "failed to connect to server", + "server", identifier, + "attempt", attempts, + "error", err, + ) + time.Sleep(1 * time.Second) + if attempts == 3 { + c <- server.NewStartupMessage( + false, + fmt.Errorf("failed to connect to %s server", identifier), + ) + break + } + attempts += 1 + continue + } + // #nosec + conn.Close() + c <- server.NewStartupMessage(true, nil) + break + } + + }() + return c +} + +// checkAndServeAssets checks if the request is for static content +// and serves it if it is +// return true if the request was for static content +func (s *Server) checkAndServeAssets(c *gin.Context, host string) bool { + // check if the path is a file + staticPath, err := securejoin.SecureJoin(s.staticPath, host) + if err != nil { + s.logger.Infow("insecure path attempted on asset", + "error", err, + ) + return false + } + staticPath, err = securejoin.SecureJoin(staticPath, c.Request.URL.Path) + if err != nil { + s.logger.Infow("insecure path attempted on asset", + "error", err, + ) + return false + } + // check if path exists on the specific domain + info, err := os.Stat(staticPath) + if err != nil { + s.logger.Debugw("not found on domain: %s", + "path", staticPath, + ) + // check if this is a global asset + return s.checkAndServeSharedAsset(c) + } + if info.IsDir() { + return false + } + // checks if the path is a directory + c.Header("Content-Type", mime.TypeByExtension(filepath.Ext(staticPath))) + c.File(staticPath) + return true +} + +func (s *Server) checkAndServeSharedAsset(c *gin.Context) bool { + // check if the path is a file + // TODO I need to somehow make this safe from directory traversal + staticPath, err := securejoin.SecureJoin( + s.staticPath+"/shared", + c.Request.URL.Path, + ) + if err != nil { + s.logger.Infow("insecure path attempted on asset", + "error", err, + ) + return false + } + // check if path exists + info, err := os.Stat(staticPath) + if err != nil { + _ = err + return false + } + if info.IsDir() { + return false + } + // checks if the path is a directory + c.Header("Content-Type", 
mime.TypeByExtension(filepath.Ext(staticPath))) + c.File(staticPath) + return true +} + +// Handler is middleware that takes care of everything related to incoming phishing requests +// checks if the domain is valid and usable +// checks if the request is for a phishing page +// checks if the request is for a assets +// checks if the request should be redirected +// checks if the request is for a static page or static not found page +func (s *Server) Handler(c *gin.Context) { + host, err := s.getHostOnly(c.Request.Host) + if err != nil { + s.logger.Debugw("failed to parse host", + "error", err, + ) + c.Status(http.StatusNotFound) + c.Abort() + return + } + // check if the domain is valid + // use DB directly here to avoid getting unnecessary data + // as a domain contains big blobs for static content + var domain *database.Domain + res := s.db. + Select("id, name, host_website, redirect_url"). + Where("name = ?", host). + First(&domain) + + if res.RowsAffected == 0 { + s.logger.Debug("domain not found") + c.Status(http.StatusNotFound) + c.Abort() + return + } + // check if the request is for a tacking pixel + if c.Request.URL.Path == "/wf/open" { + s.controllers.Campaign.TrackingPixel(c) + c.Abort() + return + } + + // check if the request is for a phishing page or is denied by allow/deny list + isRequestForPhishingPageOrDenied, err := s.checkAndServePhishingPage(c, domain) + if err != nil { + s.logger.Errorw("failed to serve phishing page", + "error", err, + ) + c.Status(http.StatusInternalServerError) + c.Abort() + return + } + // if this was a request for the phishing page and there was no error + if isRequestForPhishingPageOrDenied { + return + } + // check if the request is for assets + servedAssets := s.checkAndServeAssets(c, host) + if servedAssets { + s.logger.Debug("served static asset") + c.Abort() + return + } + // check if the request should be redirected + if domain.RedirectURL != "" { + c.Redirect(http.StatusMovedPermanently, domain.RedirectURL) + c.Abort() + return + } + // check if the domain should serve static content + if !domain.HostWebsite { + s.logger.Debugw("404 - Domain does not serve static content", + "host", host, + ) + c.Status(http.StatusNotFound) + c.Abort() + return + } + // check if the path is a OK page or not found + if c.Request.URL.Path != "/" { + res := s.db. + Select("page_not_found_content"). + Where("name = ?", host). + First(&domain) + + if res.RowsAffected == 0 { + s.logger.Errorw("domain page unexpectedly not found", + "host", host, + ) + c.Status(http.StatusInternalServerError) + c.Abort() + return + } + // TODO extract this into another method, maybe file + t, err := template. + New("staticContent"). + Funcs(service.TemplateFuncs()). + Parse(string(domain.PageNotFoundContent)) + + if err != nil { + s.logger.Errorw("failed to parse static content template", + "error", err, + ) + c.Status(http.StatusInternalServerError) + return + } + var buf bytes.Buffer + err = t.Execute(&buf, map[string]any{ + "Domain": host, + "BaseURL": "https://" + host + "/", + "URL": c.Request.URL.String(), + }) + if err != nil { + s.logger.Errorw("failed to execute static content template", + "error", err, + ) + c.Status(http.StatusInternalServerError) + return + } + c.Data( + http.StatusNotFound, + "text/html; charset=utf-8", + []byte(buf.Bytes()), + ) + c.Abort() + return + } + // serve the static page + res = s.db. + Select("page_content"). + Where("name = ?", host). 
+ First(&domain) + + if res.RowsAffected == 0 { + s.logger.Errorw("static page was unexpectedly not found", + "host", host, + ) + c.Status(http.StatusInternalServerError) + c.Abort() + return + } + t, err := template. + New("staticContent"). + Funcs(service.TemplateFuncs()). + Parse(domain.PageContent) + + if err != nil { + s.logger.Errorw("failed to parse static content template", + "error", errs.Wrap(err), + ) + c.Status(http.StatusInternalServerError) + return + } + buf := &bytes.Buffer{} + err = t.Execute(buf, map[string]any{ + "Domain": host, + "BaseURL": "https://" + host + "/", + "URL": "https://" + host + c.Request.URL.String(), + }) + if err != nil { + s.logger.Errorw("failed to execute static content template", + "error", errs.Wrap(err), + ) + c.Status(http.StatusInternalServerError) + return + } + + c.Data( + http.StatusOK, + "text/html; charset=utf-8", + buf.Bytes(), + ) + c.Abort() +} + +// handlerNotFound handles the request for a not found page +func (s *Server) handlerNotFound(c *gin.Context) { + host, err := s.getHostOnly(c.Request.Host) + if err != nil { + s.logger.Debugw("failed to parse host", + "host", c.Request.Host, + "error", err, + ) + c.Status(http.StatusNotFound) + return + } + var domain *database.Domain + res := s.db. + Select("page_not_found_content"). + Where("name = ?", host). + Find(&domain) + + if res.RowsAffected == 0 { + s.logger.Debugw("host not found", + "host", host, + ) + c.Status(http.StatusNotFound) + return + } + t := template.New("staticContent") + t = t.Funcs(service.TemplateFuncs()) + tmpl, err := t.Parse(string(domain.PageNotFoundContent)) + if err != nil { + s.logger.Errorw("failed to parse static content template", + "error", errs.Wrap(err), + ) + c.Status(http.StatusInternalServerError) + return + } + var buf bytes.Buffer + err = tmpl.Execute(&buf, map[string]any{ + "Domain": host, + "BaseURL": "https://" + host + "/", + "URL": c.Request.URL.String(), + }) + if err != nil { + s.logger.Errorw("failed to execute static content template", + "error", err, + ) + c.Status(http.StatusInternalServerError) + return + } + c.Data( + http.StatusNotFound, + "text/html; charset=utf-8", + []byte(buf.Bytes()), + ) +} + +// checkAndServePhishingPage serves a phishing page +// returns a bool if the request was for a phishing page +// and an error if there was an error +func (s *Server) checkAndServePhishingPage( + c *gin.Context, + domain *database.Domain, +) (bool, error) { + // get all identifiers and collect all that match query params + identifiers, err := s.repositories.Identifier.GetAll(c, &repository.IdentifierOption{}) + if err != nil { + s.logger.Debugw("failed to get all identifiers", + "error", err, + ) + return false, errs.Wrap(err) + } + query := c.Request.URL.Query() + matchingParams := []string{} + for _, identifier := range identifiers.Rows { + if name := identifier.Name.MustGet(); query.Has(name) { + matchingParams = append(matchingParams, name) + } + } + // check which match a UUIDv4 and check if any of those match a campaignrecipient id + matchingUUIDParams := []*uuid.UUID{} + for _, param := range matchingParams { + if id, err := uuid.Parse(query.Get(param)); err == nil { + matchingUUIDParams = append(matchingUUIDParams, &id) + } + } + if len(matchingUUIDParams) == 0 { + s.logger.Debugw("'campaignrecipient' not found", + "error", err, + ) + return false, nil + } + var campaignRecipient *model.CampaignRecipient + var campaignRecipientID *uuid.UUID + // however limit it to 3 attempts to prevent a DoS attack + for i, v := range 
matchingUUIDParams { + if i > 2 { + s.logger.Warn("too many attempts to get campaign recipient by a UUID. Ensure that there are no more than max 3 UUID in the phishing URL!") + return false, nil + } + campaignRecipient, err = s.repositories.CampaignRecipient.GetByCampaignRecipientID( + c, + v, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + s.logger.Debugw("failed to get active campaign and campaign recipient by query param", + "error", err, + ) + return false, fmt.Errorf("failed to get active campaign and campaign recipient by query param: %s", err) + } + if campaignRecipient != nil { + campaignRecipientID = v + break + } + } + // there was a campagin recipient id but it did not match a campaign + // this could be because there is an ID value but is not for us + if campaignRecipient == nil { + s.logger.Debugw("'campaignrecipient' not found", + "error", err, + ) + return false, nil + } + // at this point we know which url param matched the campaignrecipientID, however + // it could have been any available identifier and not the one matching the campaign template + // it is possible now to check if it is correct, however it does not matter as the campaign + // recipient is already found + campaignID := campaignRecipient.CampaignID.MustGet() + campaign, err := s.repositories.Campaign.GetByID( + c, + &campaignID, + &repository.CampaignOption{}, + ) + // if there was an error + if err != nil { + s.logger.Debugw("failed to get active campaign", + "error", err, + ) + return false, fmt.Errorf("failed to get active campaign and campaign recipient by public id: %s", err) + } + // check if the campaign is active + if !campaign.IsActive() { + s.logger.Debugw("campaign is not active", + "campaignID", campaign.ID.MustGet(), + ) + return false, nil + } + templateID, err := campaign.TemplateID.Get() + if err != nil { + s.logger.Debugw("campaign has no template", + "error", err, + ) + return false, nil + } + cTemplate, err := s.repositories.CampaignTemplate.GetByID( + c, + &templateID, + &repository.CampaignTemplateOption{ + WithIdentifier: true, + }, + ) + if err != nil { + s.logger.Debugw("failed to get campaign template", + "templateID", templateID.String(), + "error", err, + ) + return false, fmt.Errorf("failed to get campaign template: %s", err) + } + // check that the requesters IP is allow listed + ip := c.ClientIP() + servedByIPFilter, err := s.checkIPFilter(c, ip, campaign, domain, &campaignID) + if err != nil { + return false, err + } + if servedByIPFilter { + return true, nil + } + // get the recipient + // if the recipient has been anonymized or removed, stop + recipientID, err := campaignRecipient.RecipientID.Get() + if err != nil { + return false, nil + } + recipient, err := s.repositories.Recipient.GetByID( + c, + &recipientID, + &repository.RecipientOption{}, + ) + if err != nil { + return false, fmt.Errorf("failed to get recipient: %s", err) + } + // figure out which page types this template has + var beforePageID *uuid.UUID + if v, err := cTemplate.BeforeLandingPageID.Get(); err == nil { + beforePageID = &v + } + landingPageID, err := cTemplate.LandingPageID.Get() + if err != nil { + return false, fmt.Errorf("Template is incomplete, missing landing page ID: %s", err) + } + var afterPageID *uuid.UUID + if v, err := cTemplate.AfterLandingPageID.Get(); err == nil { + afterPageID = &v + } + + stateParamKey := cTemplate.StateIdentifier.Name.MustGet() + pageTypeQuery := "" + encryptedParam := c.Query(stateParamKey) + secret := utils.UUIDToSecret(&campaignID) + if v, err := 
utils.Decrypt(encryptedParam, secret); err == nil { + pageTypeQuery = v + } + // if there is no page type then this is the before landing page or the landing page + var pageID *uuid.UUID + nextPageType := "" + currentPageType := "" + if len(pageTypeQuery) == 0 { + if beforePageID != nil { + pageID = beforePageID + currentPageType = data.PAGE_TYPE_BEFORE + nextPageType = data.PAGE_TYPE_LANDING + } else { + pageID = &landingPageID + currentPageType = data.PAGE_TYPE_LANDING + if afterPageID != nil { + nextPageType = data.PAGE_TYPE_AFTER + } else { + nextPageType = data.PAGE_TYPE_DONE // landing page is final page + } + } + // if there is a page type, then we use that + } else { + switch pageTypeQuery { + // this is not relevant - already taken care of, ignore it + case data.PAGE_TYPE_BEFORE: + // this is set if the previous page was a before page + case data.PAGE_TYPE_LANDING: + pageID = &landingPageID + currentPageType = data.PAGE_TYPE_LANDING + if afterPageID != nil { + nextPageType = data.PAGE_TYPE_AFTER + } else { + nextPageType = data.PAGE_TYPE_DONE // landiung page is final page + } + // this is set if the previous page was a landing page + case data.PAGE_TYPE_AFTER: + if afterPageID != nil { + pageID = afterPageID + } else { + pageID = &landingPageID + } + // next page after a after landinge page, is the same page + currentPageType = data.PAGE_TYPE_AFTER + nextPageType = data.PAGE_TYPE_DONE + case data.PAGE_TYPE_DONE: + if afterPageID != nil { + pageID = afterPageID + } else { + pageID = &landingPageID + } + currentPageType = data.PAGE_TYPE_DONE + nextPageType = data.PAGE_TYPE_DONE + } + } + isPOSTRequest := c.Request.Method == http.MethodPost + // if this is a POST request, then save the submitted data + if isPOSTRequest { + submitDataEventID := cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA] + err = c.Request.ParseForm() + if err != nil { + return true, fmt.Errorf("failed to parse submitted form data: %s", err) + } + newEventID := uuid.New() + campaignID := campaign.ID.MustGet() + clientIP := vo.NewOptionalString64Must(c.ClientIP()) + userAgent := vo.NewOptionalString255Must(utils.Substring(c.Request.UserAgent(), 0, MAX_USER_AGENT_SAVED)) + submittedData := vo.NewEmptyOptionalString1MB() + if campaign.SaveSubmittedData.MustGet() { + submittedData, err = vo.NewOptionalString1MB(c.Request.PostForm.Encode()) + if err != nil { + return true, fmt.Errorf("user submitted phishing data too large: %s", err) + } + } + var event *model.CampaignEvent + // only save data if red team flag is set + if !campaign.IsAnonymous.MustGet() { + event = &model.CampaignEvent{ + ID: &newEventID, + CampaignID: &campaignID, + RecipientID: &recipientID, + IP: clientIP, + UserAgent: userAgent, + EventID: submitDataEventID, + Data: submittedData, + } + } else { + ua := vo.NewEmptyOptionalString255() + data := vo.NewEmptyOptionalString1MB() + event = &model.CampaignEvent{ + ID: &newEventID, + CampaignID: &campaignID, + RecipientID: nil, + IP: vo.NewEmptyOptionalString64(), + UserAgent: ua, + EventID: submitDataEventID, + Data: data, + } + } + err = s.repositories.Campaign.SaveEvent(c, event) + if err != nil { + return true, fmt.Errorf("failed to save campaign event: %s", err) + } + // check and update if most notable event for recipient + currentNotableEventID, _ := campaignRecipient.NotableEventID.Get() + if cache.IsMoreNotableCampaignRecipientEventID( + ¤tNotableEventID, + submitDataEventID, + ) { + campaignRecipient.NotableEventID.Set(*submitDataEventID) + err := 
s.repositories.CampaignRecipient.UpdateByID( + c, + campaignRecipientID, + campaignRecipient, + ) + if err != nil { + s.logger.Errorw( + "failed to update notable event", + "campaignRecipientID", campaignRecipientID.String(), + "error", err, + ) + return true, errs.Wrap(err) + } + } + // handle webhook + webhookID, err := s.repositories.Campaign.GetWebhookIDByCampaignID( + c, + &campaignID, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + s.logger.Errorw("failed to get webhook id by campaign id", + "campaignID", campaignID.String(), + "error", err, + ) + return true, errs.Wrap(err) + } + if webhookID != nil { + err = s.services.Campaign.HandleWebhook( + // TODO this should be tied to a application wide context not the request + context.TODO(), + webhookID, + &campaignID, + &recipientID, + data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA, + ) + if err != nil { + return true, fmt.Errorf("failed to handle webhook: %s", err) + } + } + } + // if redirect && POST && final page + if isPOSTRequest { + if redirectURL, err := cTemplate.AfterLandingPageRedirectURL.Get(); err == nil { + if v := redirectURL.String(); len(v) > 0 { + // if the current page is landing and there is no after, redirect + if currentPageType == data.PAGE_TYPE_DONE { + c.Redirect(http.StatusSeeOther, v) + c.Abort() + return true, nil + } + } + } + } + // fetch the page + page, err := s.repositories.Page.GetByID( + c, + pageID, + &repository.PageOption{}, + ) + if err != nil { + return true, fmt.Errorf("failed to get landing page: %s", err) + } + // fetch the sender email to use for the template + emailID := cTemplate.EmailID.MustGet() + email, err := s.repositories.Email.GetByID( + c, + &emailID, + &repository.EmailOption{}, + ) + if err != nil { + return true, fmt.Errorf("failed to get email: %s", err) + } + encryptedParam, err = utils.Encrypt(nextPageType, secret) + if err != nil { + return true, fmt.Errorf("failed to encrypt next page type: %s", err) + } + urlPath := cTemplate.URLPath.MustGet().String() + err = s.renderPageTemplate( + c, + domain, + email, + campaignRecipientID, + recipient, + page, + cTemplate, + encryptedParam, + urlPath, + ) + if err != nil { + return true, fmt.Errorf("failed to render phishing page: %s", err) + } + // save the event of page has been visited + visitEventID := uuid.New() + eventName := "" + switch currentPageType { + case data.PAGE_TYPE_BEFORE: + eventName = data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED + case data.PAGE_TYPE_LANDING: + eventName = data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED + case data.PAGE_TYPE_AFTER: + eventName = data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED + } + eventID := cache.EventIDByName[eventName] + clientIP := vo.NewOptionalString64Must(c.ClientIP()) + userAgent := vo.NewOptionalString255Must(utils.Substring(c.Request.UserAgent(), 0, MAX_USER_AGENT_SAVED)) + var visitEvent *model.CampaignEvent + if !campaign.IsAnonymous.MustGet() { + visitEvent = &model.CampaignEvent{ + ID: &visitEventID, + CampaignID: &campaignID, + RecipientID: &recipientID, + IP: clientIP, + UserAgent: userAgent, + EventID: eventID, + Data: vo.NewEmptyOptionalString1MB(), + } + } else { + ua := vo.NewEmptyOptionalString255() + visitEvent = &model.CampaignEvent{ + ID: &visitEventID, + CampaignID: &campaignID, + RecipientID: nil, + IP: vo.NewEmptyOptionalString64(), + UserAgent: ua, + EventID: eventID, + Data: vo.NewEmptyOptionalString1MB(), + } + } + // only log the page visit if it is not after the final page + if currentPageType != data.PAGE_TYPE_DONE { + err = 
s.repositories.Campaign.SaveEvent( + c, + visitEvent, + ) + if err != nil { + return true, fmt.Errorf("failed to save campaign event: %s", err) + } + } + // check and update if most notable event for recipient + currentNotableEventID, _ := campaignRecipient.NotableEventID.Get() + if cache.IsMoreNotableCampaignRecipientEventID( + ¤tNotableEventID, + eventID, + ) { + campaignRecipient.NotableEventID.Set(*eventID) + err := s.repositories.CampaignRecipient.UpdateByID( + c, + campaignRecipientID, + campaignRecipient, + ) + if err != nil { + s.logger.Errorw("failed to update notable event", + "campaignRecipientID", campaignRecipientID.String(), + "eventID", eventID.String(), + "error", err, + ) + return true, errs.Wrap(err) + } + } + // handle webhook + webhookID, err := s.repositories.Campaign.GetWebhookIDByCampaignID( + c, + &campaignID, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + s.logger.Errorw("failed to get webhook id by campaign id %s", + "campaignID", campaignID.String(), + "error", err, + ) + return true, errs.Wrap(err) + } + if webhookID == nil { + return true, nil + } + // do not notify on visiting the page done as it is a repeat of the flow + if currentPageType != data.PAGE_TYPE_DONE { + err = s.services.Campaign.HandleWebhook( + // TODO this should be tied to a application wide context not the request + context.TODO(), + webhookID, + &campaignID, + &recipientID, + eventName, + ) + if err != nil { + return true, fmt.Errorf("failed to handle webhook: %s", err) + } + } + + return true, nil +} + +func (s *Server) renderDenyPage( + c *gin.Context, + domain *database.Domain, + pageID *uuid.UUID, +) error { + ctx := c.Request.Context() + page, err := s.repositories.Page.GetByID( + ctx, + pageID, + &repository.PageOption{}, + ) + if err != nil { + return fmt.Errorf("failed to get landing page: %s", err) + } + tmpl, err := template.New("page").Parse(page.Content.MustGet().String()) + if err != nil { + return fmt.Errorf("failed to parse page template: %s", err) + } + baseURL := "https://" + domain.Name + w := bytes.NewBuffer([]byte{}) + err = tmpl.Execute(w, + map[string]string{ + "BaseURL": baseURL, + }) + if err != nil { + return fmt.Errorf("failed to execute page template: %s", err) + } + c.Data(http.StatusOK, "text/html; charset=utf-8", w.Bytes()) + c.Abort() + s.logger.Debugw("rendered deny page: %s", + "pageName", page.Name.MustGet().String(), + "pageID", page.ID.MustGet().String(), + ) + return nil + +} + +// AssignRoutes assigns the routes to the server +func (s *Server) AssignRoutes(r *gin.Engine) { + r.Use(s.Handler) + r.NoRoute(s.handlerNotFound) +} + +func (s *Server) StartHTTP( + r *gin.Engine, + conf *config.Config, +) (chan server.StartupMessage, net.Listener, error) { + addr := conf.PhishingHTTPNetAddress() + ln, err := net.Listen( + "tcp", + addr, + ) + if err != nil { + return nil, nil, fmt.Errorf("failed to listen on %s due to: %s", addr, err) + } + s.HTTPServer = s.defaultServer(r, false) + + go func() { + s.logger.Debugw("starting phishing HTTP server", + "address", addr, + ) + // handle on-demand http TLS challenges + myACME := certmagic.NewACMEIssuer(s.certMagicConfig, certmagic.DefaultACME) + myACME.HTTPChallengeHandler(r) + err := s.HTTPServer.Serve(ln) + if err != nil && err != http.ErrServerClosed { + s.logger.Fatalw("failed to start phishing HTTP server", + "error", err, + ) + } + }() + // start a routine to test the connection + startupMessage := s.testTCPConnection("HTTP phishing server", addr) + return startupMessage, ln, nil +} + +// 
StartHTTPS starts the server and returns a signal channel +func (s *Server) StartHTTPS( + r *gin.Engine, + conf *config.Config, +) (chan server.StartupMessage, net.Listener, error) { + addr := conf.PhishingHTTPSNetAddress() + // create supplied cert path if it does not exist + err := os.MkdirAll(s.ownManagedTLSCertPath, 0750) + if err != nil { + return nil, nil, fmt.Errorf("failed to create supplied certs path %s: %s", s.ownManagedTLSCertPath, err) + } + // cache all own supplied certs + folders, err := os.ReadDir(s.ownManagedTLSCertPath) + if err != nil { + s.logger.Warnw("failed to read supplied certs folder", + "path", s.ownManagedTLSCertPath, + "error", err, + ) + } + for _, folder := range folders { + if !folder.IsDir() { + continue + } + // get the folder path + folderPath := filepath.Join(s.ownManagedTLSCertPath, folder.Name()) + // find .pem and .key files + certFile := filepath.Join(folderPath, "cert.pem") + keyFile := filepath.Join(folderPath, "cert.key") + // check if both files exist + _, err := os.Stat(certFile) + if err != nil { + s.logger.Warnw("certificate file missing", + "folder", folder.Name(), + "error", err, + ) + continue + } + _, err = os.Stat(keyFile) + if err != nil { + s.logger.Warnw("certificate key file missing", + "folder", folder.Name(), + "error", err, + ) + continue + } + hash, err := s.certMagicConfig.CacheUnmanagedCertificatePEMFile( + context.Background(), + certFile, + keyFile, + []string{}, + ) + if err != nil { + s.logger.Warnw("failed to cache certificate", + "folder", folder.Name(), + "error", err, + ) + continue + } + s.logger.Debugw("cached certificate", + "folder", folder.Name(), + "hash", hash, + ) + } + // setup TLS config + tlsConf := s.certMagicConfig.TLSConfig() + tlsConf.NextProtos = append([]string{"h2"}, tlsConf.NextProtos...) 
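// [editor's note] The loop above implies an on-disk layout for operator-supplied
// certificates, one folder per domain containing cert.pem and cert.key (inferred
// from the file names used above; the patch itself does not document the layout):
//
//	<ownManagedTLSCertPath>/
//	    phish.example.com/
//	        cert.pem   (certificate chain, PEM)
//	        cert.key   (matching private key, PEM)
//
// A minimal sketch of caching one such pair, mirroring the certmagic call used
// above; the paths are placeholders:
//
//	package main
//
//	import (
//		"context"
//		"log"
//
//		"github.com/caddyserver/certmagic"
//	)
//
//	func main() {
//		cfg := certmagic.NewDefault()
//		hash, err := cfg.CacheUnmanagedCertificatePEMFile(
//			context.Background(),
//			"/path/to/certs/phish.example.com/cert.pem",
//			"/path/to/certs/phish.example.com/cert.key",
//			[]string{},
//		)
//		if err != nil {
//			log.Fatal(err)
//		}
//		log.Printf("cached certificate %s", hash)
//	}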
+ // setup gin + ln, err := tls.Listen( + "tcp", + addr, + tlsConf, + ) + if err != nil { + return nil, nil, fmt.Errorf("failed to listen on %s due to: %s", ln.Addr().String(), err) + } + s.HTTPSServer = s.defaultServer(r, true) + // start server + go func() { + s.logger.Debugw("starting phishing HTTPS server", + "address", addr, + ) + err := s.HTTPSServer.Serve(ln) + + if err != nil && err != http.ErrServerClosed { + s.logger.Fatalw("failed to start phishing HTTPS server", + "error", err, + ) + } + }() + // start a routine to test the connection + startupMessage := s.testTCPConnection("HTTPS phishing server", addr) + return startupMessage, ln, nil +} + +// renderPageTempate renders a page template +func (s *Server) renderPageTemplate( + c *gin.Context, + domain *database.Domain, + email *model.Email, + campaignRecipientID *uuid.UUID, + recipient *model.Recipient, + page *model.Page, + campaignTemplate *model.CampaignTemplate, + stateParam string, + urlPath string, +) error { + content, err := page.Content.Get() + if err != nil { + return fmt.Errorf("no page content set to render: %s", err) + } + phishingPage, err := s.services.Template.CreatePhishingPage( + domain, + email, + campaignRecipientID, + recipient, + content.String(), + campaignTemplate, + stateParam, + urlPath, + ) + if err != nil { + return fmt.Errorf("failed to create phishing page: %s", err) + } + c.Data(http.StatusOK, "text/html; charset=utf-8", phishingPage.Bytes()) + c.Abort() + s.logger.Debugw("served phishing page", + "pageID", page.ID.MustGet().String(), + "pageName", page.Name.MustGet().String(), + ) + return nil +} + +func (s *Server) checkIPFilter( + ctx *gin.Context, + ip string, + campaign *model.Campaign, + domain *database.Domain, + campaignID *uuid.UUID, +) (bool, error) { + allowDenyLEntries, err := s.repositories.Campaign.GetAllDenyByCampaignID(ctx, campaignID) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + s.logger.Debugw("failed to get deny list for campaign", + "campaignID", campaignID.String(), + "error", err, + ) + return false, fmt.Errorf("failed to get deny list for campaign: %s", err) + } + // if there is a deny list, check if the IP allowed / denied + // when allow listing we must check all entries to see if we have a allowed IP + // when deny listing only a single entry needs to deny the IP + isAllowListing := false + allowed := len(allowDenyLEntries) == 0 + for i, allowDeny := range allowDenyLEntries { + if i == 0 { + isAllowListing = allowDeny.Allowed.MustGet() + if !isAllowListing { + // if deny listing, then by default the IP is allowed until proven otherwise + allowed = true + } + } + ok, err := allowDeny.IsIPAllowed(ip) + if err != nil { + return false, errs.Wrap(err) + } + if isAllowListing && ok { + s.logger.Debugw("IP is allow listed", + "ip", ip, + "list name", allowDeny.Name.MustGet().String(), + "list id", allowDeny.ID.MustGet().String(), + ) + allowed = true + break + // if it is a deny list and a IP is not ok, we can break + } else if !isAllowListing && !ok { + s.logger.Debugw("IP is deny listed", + "ip", ip, + "list name", allowDeny.Name.MustGet().String(), + "list id", allowDeny.ID.MustGet().String(), + ) + allowed = false + break + } + } + if !allowed { + s.logger.Debugw("IP is not allowed", + "ip", ip, + ) + if denyPageID, err := campaign.DenyPageID.Get(); err == nil { + err = s.renderDenyPage(ctx, domain, &denyPageID) + if err != nil { + return true, fmt.Errorf("failed to render deny page: %s", err) + } + return true, nil + } + 
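// [editor's note] The allow/deny evaluation above works in two modes decided by the
// first entry: on an allow list the IP must match at least one entry to pass, on a
// deny list a single match is enough to block it, and an empty list permits
// everything. A simplified, self-contained sketch of that decision over CIDR ranges
// (the real check delegates to AllowDeny.IsIPAllowed, whose matching rules are
// defined elsewhere in the repository):
//
//	package main
//
//	import (
//		"fmt"
//		"net/netip"
//	)
//
//	// ipPermitted mirrors the evaluation above in simplified form.
//	func ipPermitted(ip netip.Addr, cidrs []netip.Prefix, allowList bool) bool {
//		if len(cidrs) == 0 {
//			return true // no list configured, everything is permitted
//		}
//		for _, p := range cidrs {
//			if p.Contains(ip) {
//				return allowList // a match allows on an allow list, denies on a deny list
//			}
//		}
//		return !allowList // no match: denied on an allow list, allowed on a deny list
//	}
//
//	func main() {
//		ip := netip.MustParseAddr("203.0.113.10")
//		list := []netip.Prefix{netip.MustParsePrefix("203.0.113.0/24")}
//		fmt.Println(ipPermitted(ip, list, true))  // true: matched on an allow list
//		fmt.Println(ipPermitted(ip, list, false)) // false: matched on a deny list
//	}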
ctx.AbortWithStatus(http.StatusNotFound) + return true, nil + } + return false, nil +} diff --git a/backend/app/services.go b/backend/app/services.go new file mode 100644 index 0000000..fb23e4a --- /dev/null +++ b/backend/app/services.go @@ -0,0 +1,254 @@ +package app + +import ( + "github.com/caddyserver/certmagic" + "github.com/phishingclub/phishingclub/service" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// Services is a collection of services +type Services struct { + Asset *service.Asset + Attachment *service.Attachment + File *service.File + Company *service.Company + InstallSetup *service.InstallSetup + Option *service.Option + Page *service.Page + Session *service.Session + User *service.User + Domain *service.Domain + Recipient *service.Recipient + RecipientGroup *service.RecipientGroup + SMTPConfiguration *service.SMTPConfiguration + Email *service.Email + CampaignTemplate *service.CampaignTemplate + Campaign *service.Campaign + Template *service.Template + APISender *service.APISender + AllowDeny *service.AllowDeny + Webhook *service.Webhook + Identifier *service.Identifier + Version *service.Version + SSO *service.SSO + Update *service.Update + Import *service.Import +} + +// NewServices creates a collection of services +func NewServices( + db *gorm.DB, + repositories *Repositories, + logger *zap.SugaredLogger, + utilities *Utilities, + assetPath string, + attachmentPath string, + ownManagedCertificatePath string, + enviroment string, + certMagicConfig *certmagic.Config, + certMagicCache *certmagic.Cache, + licenseServerURL string, +) *Services { + common := service.Common{ + Logger: logger, + } + templateService := &service.Template{ + Common: common, + } + file := &service.File{ + Common: common, + } + asset := &service.Asset{ + Common: common, + RootFolder: assetPath, + FileService: file, + AssetRepository: repositories.Asset, + DomainRepository: repositories.Domain, + } + attachment := &service.Attachment{ + Common: common, + RootFolder: attachmentPath, + FileService: file, + AttachmentRepository: repositories.Attachment, + EmailRepository: repositories.Email, + } + installSetup := &service.InstallSetup{ + Common: common, + UserRepository: repositories.User, + RoleRepository: repositories.Role, + CompanyRepository: repositories.Company, + PasswordHasher: utilities.PasswordHasher, + } + sessionService := &service.Session{ + Common: common, + SessionRepository: repositories.Session, + } + optionService := &service.Option{ + Common: common, + OptionRepository: repositories.Option, + } + userService := &service.User{ + Common: common, + UserRepository: repositories.User, + RoleRepository: repositories.Role, + CompanyRepository: repositories.Company, + PasswordHasher: utilities.PasswordHasher, + } + recipient := &service.Recipient{ + Common: common, + RecipientRepository: repositories.Recipient, + RecipientGroupRepository: repositories.RecipientGroup, + CampaignRepository: repositories.Campaign, + CampaignRecipientRepository: repositories.CampaignRecipient, + } + recipientGroup := &service.RecipientGroup{ + Common: common, + CampaignRepository: repositories.Campaign, + CampaignRecipientRepository: repositories.CampaignRecipient, + RecipientGroupRepository: repositories.RecipientGroup, + RecipientRepository: repositories.Recipient, + RecipientService: recipient, + DB: db, + } + webhook := &service.Webhook{ + Common: common, + CampaignRepository: repositories.Campaign, + WebhookRepository: repositories.Webhook, + } + campaignTemplate := &service.CampaignTemplate{ + Common: common, 
+ CampaignTemplateRepository: repositories.CampaignTemplate, + CampaignRepository: repositories.Campaign, + IdentifierRepository: repositories.Identifier, + } + apiSender := &service.APISender{ + Common: common, + APISenderRepository: repositories.APISender, + TemplateService: templateService, + CampaignTemplateService: campaignTemplate, + } + smtpConfiguration := &service.SMTPConfiguration{ + Common: common, + SMTPConfigurationRepository: repositories.SMTPConfiguration, + CampaignTemplateService: campaignTemplate, + } + page := &service.Page{ + Common: common, + CampaignRepository: repositories.Campaign, + PageRepository: repositories.Page, + CampaignTemplateService: campaignTemplate, + } + domain := &service.Domain{ + Common: common, + OwnManagedCertificatePath: ownManagedCertificatePath, + CertMagicConfig: certMagicConfig, + CertMagicCache: certMagicCache, + DomainRepository: repositories.Domain, + CompanyRepository: repositories.Company, + CampaignTemplateService: campaignTemplate, + AssetService: asset, + FileService: file, + } + email := &service.Email{ + Common: common, + AttachmentPath: attachmentPath, + AttachmentService: attachment, + DomainService: domain, + EmailRepository: repositories.Email, + SMTPService: smtpConfiguration, + RecipientService: recipient, + TemplateService: templateService, + } + campaign := &service.Campaign{ + Common: common, + CampaignRepository: repositories.Campaign, + CampaignRecipientRepository: repositories.CampaignRecipient, + RecipientRepository: repositories.Recipient, + RecipientGroupRepository: repositories.RecipientGroup, + AllowDenyRepository: repositories.AllowDeny, + WebhookRepository: repositories.Webhook, + CampaignTemplateService: campaignTemplate, + DomainService: domain, + RecipientService: recipient, + MailService: email, + APISenderService: apiSender, + SMTPConfigService: smtpConfiguration, + WebhookService: webhook, + TemplateService: templateService, + AttachmentPath: attachmentPath, + } + allowDeny := &service.AllowDeny{ + Common: common, + AllowDenyRepository: repositories.AllowDeny, + CampaignRepository: repositories.Campaign, + } + identifier := &service.Identifier{ + Common: common, + IdentifierRepository: repositories.Identifier, + } + companyService := &service.Company{ + Common: common, + DomainService: domain, + PageService: page, + EmailService: email, + SMTPConfigurationService: smtpConfiguration, + APISenderService: apiSender, + RecipientService: recipient, + RecipientGroupService: recipientGroup, + CampaignService: campaign, + CampaignTemplate: campaignTemplate, + AllowDenyService: allowDeny, + WebhookService: webhook, + CompanyRepository: repositories.Company, + } + versionService := &service.Version{Common: common} + ssoService := &service.SSO{ + Common: common, + OptionsService: optionService, + UserService: userService, + SessionService: sessionService, + // MSALClient: msalClient, this dependency is set AFTER this function + } + updateService := &service.Update{ + Common: common, + OptionService: optionService, + } + importService := &service.Import{ + Common: common, + Asset: asset, + Page: page, + Email: email, + File: file, + EmailRepository: repositories.Email, + PageRepository: repositories.Page, + } + + return &Services{ + Asset: asset, + Attachment: attachment, + Company: companyService, + File: file, + InstallSetup: installSetup, + Option: optionService, + Page: page, + Session: sessionService, + User: userService, + Domain: domain, + Recipient: recipient, + RecipientGroup: recipientGroup, + 
SMTPConfiguration: smtpConfiguration, + Email: email, + Template: templateService, + CampaignTemplate: campaignTemplate, + Campaign: campaign, + APISender: apiSender, + AllowDeny: allowDeny, + Webhook: webhook, + Identifier: identifier, + Version: versionService, + SSO: ssoService, + Update: updateService, + Import: importService, + } +} diff --git a/backend/app/utils.go b/backend/app/utils.go new file mode 100644 index 0000000..093b53b --- /dev/null +++ b/backend/app/utils.go @@ -0,0 +1,23 @@ +package app + +import ( + "github.com/phishingclub/phishingclub/api" + "github.com/phishingclub/phishingclub/cli" + "github.com/phishingclub/phishingclub/password" +) + +// Utilities is a collection of utils +type Utilities struct { + CLIOutputter cli.Outputter + PasswordHasher *password.Argon2Hasher + JSONResponseHandler api.JSONResponseHandler +} + +// NewUtils creates a collection of utils +func NewUtils() *Utilities { + return &Utilities{ + CLIOutputter: cli.NewCLIOutputter(), + PasswordHasher: password.NewHasherWithDefaultValues(), + JSONResponseHandler: api.NewJSONResponseHandler(), + } +} diff --git a/backend/build/flags.go b/backend/build/flags.go new file mode 100644 index 0000000..71fa92a --- /dev/null +++ b/backend/build/flags.go @@ -0,0 +1,10 @@ +package build + +type flags struct { + Production bool +} + +// Flags is a global variable for build flags +var Flags = flags{ + Production: false, +} diff --git a/backend/build/production.go b/backend/build/production.go new file mode 100644 index 0000000..4073666 --- /dev/null +++ b/backend/build/production.go @@ -0,0 +1,7 @@ +//go:build production + +package build + +func init() { + Flags.Production = true +} diff --git a/backend/build_scripts/build.sh b/backend/build_scripts/build.sh new file mode 100755 index 0000000..13e3c89 --- /dev/null +++ b/backend/build_scripts/build.sh @@ -0,0 +1,32 @@ +#!/bin/sh +echo "### Building frontend" +# remove any old builds +rm -rf phishingclub/frontend/frontend/build +mkdir -p phishingclub/frontend/frontend/build + +sudo docker run --rm \ +-v "$(pwd)":/app \ +-w /app/phishingclub/frontend \ +node:alpine \ +sh -c "npm ci && npm run build-production" + +# Get current user and group IDs +USER_ID=$(id -u) +GROUP_ID=$(id -g) + +sudo chown -R $USER_ID:$GROUP_ID phishingclub/frontend/build +sudo mv phishingclub/frontend/build ./phishingclub/frontend/frontend/ + +echo "### Building backend" +HASH=$(git rev-parse --short HEAD) +echo "Building with hash: $HASH" + + +echo "building..." 
+sudo docker run --rm \ +-v "$(pwd)":/app \ +-w /app/phishingclub/frontend \ +golang:alpine \ +go build -trimpath \ +-ldflags="-X github.com/phishingclub/phishingclub/version.hash=ph$HASH" \ +-tags production -o ../build/phishingclub main.go diff --git a/backend/build_scripts/build_backend.sh b/backend/build_scripts/build_backend.sh new file mode 100755 index 0000000..2da4de4 --- /dev/null +++ b/backend/build_scripts/build_backend.sh @@ -0,0 +1,13 @@ + +#!/bin/sh + +HASH=$(git rev-parse --short HEAD) +echo "Building backend with hash: $HASH" + +sudo docker run --rm \ +-v "$(pwd)":/app \ +-w /app/phishingclub/frontend \ +golang \ +go build -trimpath \ + -ldflags="-X github.com/phishingclub/phishingclub/version.hash=ph$HASH" \ + -tags production -o ../build/phishingclub main.go diff --git a/backend/build_scripts/build_frontend.sh b/backend/build_scripts/build_frontend.sh new file mode 100755 index 0000000..35a6044 --- /dev/null +++ b/backend/build_scripts/build_frontend.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +echo "### Building frontend" +sudo docker run --rm \ + -v "$(pwd)":/app \ + -w /app/phishingclub/frontend \ + node:alpine \ + sh -c "npm ci && npm run build-production" diff --git a/backend/build_scripts/build_release.sh b/backend/build_scripts/build_release.sh new file mode 100755 index 0000000..57245b2 --- /dev/null +++ b/backend/build_scripts/build_release.sh @@ -0,0 +1,66 @@ +#!/bin/bash +set -e + +# Get the current version from the VERSION file +VERSION=$(cat phishingclub/frontend/version/VERSION | tr -d '\n\r ') + +# Check if version is valid +if [[ ! $VERSION =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "Error: Invalid version format. Expected semver format (e.g., 0.9.0)" + exit 1 +fi + +# Get current git hash +GIT_HASH=$(git rev-parse --short HEAD) + +# Create build directory +mkdir -p build + +# Prompt for confirmation +echo "Ready to build and tag release v$VERSION ($GIT_HASH)" +read -p "Continue? (y/n): " -n 1 -r +echo +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + echo "Operation cancelled" + exit 1 +fi + +# Build frontend +echo "Building frontend..." +# remove any old builds +rm -rf phishingclub/frontend/frontend/build +mkdir -p phishingclub/frontend/frontend/build + +sudo docker run --rm \ +-v "$(pwd)":/app \ +-w /app/phishingclub/frontend \ +node:alpine \ +sh -c "npm ci && npm run build-production" + +# Get current user and group IDs +USER_ID=$(id -u) +GROUP_ID=$(id -g) + +sudo chown -R $USER_ID:$GROUP_ID phishingclub/frontend/build +mv phishingclub/frontend/build ./phishingclub/frontend/frontend/ + +# Build the application +echo "Building application..." +sudo docker run --rm \ +-v "$(pwd)":/app \ +-w /app/phishingclub/frontend \ +golang:alpine \ +go build -trimpath \ +-ldflags="-X github.com/phishingclub/phishingclub/version.hash=ph$GIT_HASH" \ +-tags production -o ../build/phishingclub_${VERSION} main.go + +echo "Build completed successfully: build/phishingclub_${VERSION}" + + +echo "Build completed successfully!" +echo "Created files:" +ls -lh build/ +cd .. + + +echo "Release tagged as v$VERSION" diff --git a/backend/build_scripts/generate_licenses.sh b/backend/build_scripts/generate_licenses.sh new file mode 100755 index 0000000..6be3132 --- /dev/null +++ b/backend/build_scripts/generate_licenses.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# Exit on any error +set -e + +echo "Generating licenses..." + +# Create temp directory if it doesn't exist +mkdir -p /tmp/licenses + +# Generate backend licenses +echo "Generating backend licenses..." 
+sudo docker compose exec -T backend bash -c "go install github.com/google/go-licenses@latest && \ + go-licenses report --ignore github.com/phishingclub/phishingclub --template ./utils/ossTemplate.tpl ./... > /tmp/backend-licenses.md 2> /dev/null" +sudo docker compose cp backend:/tmp/backend-licenses.md /tmp/licenses/ + +# Generate frontend licenses +echo "Generating frontend licenses..." +sudo docker compose exec -T frontend bash -c "npm run --silent license-report > /tmp/frontend-licenses.json 2>/dev/null" +sudo docker compose cp frontend:/tmp/frontend-licenses.json /tmp/licenses/ + +# Combine licenses +echo "Combining licenses..." +cat /tmp/licenses/backend-licenses.md > phishingclub/frontend/static/licenses.txt +echo -e "\n\n" >> phishingclub/frontend/static/licenses.txt +cat /tmp/licenses/frontend-licenses.json >> phishingclub/frontend/static/licenses.txt + +# Cleanup +rm -rf /tmp/licenses + +echo "License file generated at phishingclub/frontend/static/licenses.txt" diff --git a/backend/build_scripts/generate_test_cert.sh b/backend/build_scripts/generate_test_cert.sh new file mode 100755 index 0000000..8e3d552 --- /dev/null +++ b/backend/build_scripts/generate_test_cert.sh @@ -0,0 +1,6 @@ +#!/bin/bash +# generate private key +openssl genrsa -out test.key 2048 +# generate self-signed certificate +# generate certificate +openssl req -new -x509 -key test.key -out test.pem -days 365 diff --git a/backend/cache/local.go b/backend/cache/local.go new file mode 100644 index 0000000..a123c5c --- /dev/null +++ b/backend/cache/local.go @@ -0,0 +1,80 @@ +package cache + +import ( + "sync/atomic" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" +) + +// EventIDByName is a map of event names to event IDs +var EventIDByName = map[string]*uuid.UUID{} + +// EventNameByID is a map of event ids to names +// this is not safe before the API is up an running entirely +var EventNameByID = map[string]string{} + +var isUpdateAvailable atomic.Bool + +func init() { + for _, name := range data.Events { + EventIDByName[name] = nil + } + isUpdateAvailable.Store(false) +} + +func SetUpdateAvailable(updateAvailable bool) { + isUpdateAvailable.Store(updateAvailable) +} + +func IsUpdateAvailable() bool { + return isUpdateAvailable.Load() +} + +// TODO all priority event functions should be in utils or something, and the priority in the data package. 
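The priority table and helpers defined just below rank recipient events so that only the most notable one needs to be kept per recipient. A hypothetical helper (not part of the package) shows the intended usage, assuming it lives alongside these functions:

// keepMostNotable retains the most notable event seen so far for a recipient.
func keepMostNotable(current string, observed ...string) string {
	for _, ev := range observed {
		if IsMoreNotableCampaignRecipientEvent(ev, current) {
			current = ev
		}
	}
	return current
}

// e.g. keepMostNotable(data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT,   // priority 20
//	data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ,                   // priority 30
//	data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED)                   // priority 50
// returns EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED, since 50 beats 30 and 20.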
+// var CampaignEventPriority = map[] +// Add priority rankings (higher number = higher priority) +// readonly +var CampaignEventPriority = map[string]int{ + // campaign recipient events + data.EVENT_CAMPAIGN_RECIPIENT_CANCELLED: 80, + data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA: 70, + data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED: 60, + data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED: 40, + data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED: 50, + data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ: 30, + data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_FAILED: 20, + data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT: 20, + data.EVENT_CAMPAIGN_RECIPIENT_SCHEDULED: 10, + // campaign events + data.EVENT_CAMPAIGN_CLOSED: 30, + data.EVENT_CAMPAIGN_ACTIVE: 20, + data.EVENT_CAMPAIGN_SELF_MANAGED: 20, + data.EVENT_CAMPAIGN_SCHEDULED: 10, +} + +// IsMoreNotableCampaignRecipientEvent returns true if newEvent is more notable than currentEvent +func IsMoreNotableCampaignRecipientEvent(newEvent, currentEvent string) bool { + newPriority, newExists := CampaignEventPriority[newEvent] + currentPriority, currentExists := CampaignEventPriority[currentEvent] + + // If either event doesn't exist in our priority map, treat it as lowest priority + if !newExists || !currentExists { + return false + } + + return newPriority > currentPriority +} +func IsMoreNotableCampaignRecipientEventID(currentID, newID *uuid.UUID) bool { + if currentID == nil || currentID.String() == uuid.Nil.String() { + return true + } + if newID == nil { + return false + } + + newEventName := EventNameByID[newID.String()] + currentEventName := EventNameByID[currentID.String()] + + return IsMoreNotableCampaignRecipientEvent(newEventName, currentEventName) +} diff --git a/backend/cli/env.go b/backend/cli/env.go new file mode 100644 index 0000000..aa919fc --- /dev/null +++ b/backend/cli/env.go @@ -0,0 +1,17 @@ +package cli + +import ( + "fmt" +) + +// OutputEnv outputs the available environment variables +// These are used for CI or similar enviroment tests +func OutputEnv() { + fmt.Println("Available environment variables:") + fmt.Println("APP_MODE = production, development, integration_test") + fmt.Println("TEST_DB_LOG_LEVEL = silent, debug, error, warn, info") + fmt.Println("HTTP_PROXY - sets outgoing http proxy") + fmt.Println("HTTPS_PROXY - sets outgoing https proxy") + fmt.Println("NO_PROXY - hosts that should not be proxied") + +} diff --git a/backend/cli/info.go b/backend/cli/info.go new file mode 100644 index 0000000..11fa8bd --- /dev/null +++ b/backend/cli/info.go @@ -0,0 +1,44 @@ +package cli + +import ( + "fmt" + + "github.com/fatih/color" +) + +// PrintVersion outputs the version of the application +func PrintVersion( + name, + version string, +) { + fmt.Printf("%s (%s)\n", name, version) +} + +// PrintBanner outputs the banner for the application +func PrintBanner() { + blue := color.New(color.FgBlue) + _, _ = blue.Println(` + + --: + .@@@@@*-. + .@@@@@@@@++: + .+*=. .@@@@@@@@@@@@*-. + +@@@@++- .+@@@@@@@@@@@@@@#=: + *@@@@@@@@#=. .=#@@@@@@@@@@@@@@@+*- + *@@@@@@@@@@@+- :#@@@@@@@@@@@@@@@@#. + *@@@@@@@@@@@@= +@@@@@@@@@@@@@@@@@= + *@@@@@@@@++: .=#@@@@@@@@@@@@@@@@++: + *@@@@@*=. .+@@@@@@@@@@@@@@@@#=. + .*#+: .@@@@@@@@@@@@@+*- + .@@@@@@@@@@#=. + .@@@@@@+*- + ++@#=. 
`) + _, _ = fmt.Println() + _, _ = fmt.Println() +} +func PrintServerStarted( + name string, + address string, +) { + fmt.Printf("%s available:\nhttps://%s\n\n", name, address) +} diff --git a/backend/cli/outputter.go b/backend/cli/outputter.go new file mode 100644 index 0000000..7343587 --- /dev/null +++ b/backend/cli/outputter.go @@ -0,0 +1,37 @@ +package cli + +import ( + "github.com/fatih/color" +) + +type Outputter interface { + PrintInitialAdminAccount(username, password string) +} + +type cliOutputter struct { + color *color.Color +} + +// NewCLIOutputter creates a new CLIOutputter +func NewCLIOutputter() Outputter { + return &cliOutputter{ + color: color.New(), + } +} + +func (c *cliOutputter) PrintInitialAdminAccount( + username, + password string, +) { + bold := color.New(color.Bold) + italic := color.New(color.Bold) + _, _ = italic.Println("One time credentials for account setup") + _, _ = c.color.Println() + _, _ = c.color.Print("Username: ") + _, _ = bold.Println(username) + _, _ = c.color.Printf("Password: ") + _, _ = bold.Println(password) + _, _ = bold.Println() + _, _ = c.color.Println() + c.color.DisableColor() +} diff --git a/backend/config.docker.json b/backend/config.docker.json new file mode 100644 index 0000000..405edea --- /dev/null +++ b/backend/config.docker.json @@ -0,0 +1,21 @@ +{ + "acme": { + "email": "" + }, + "administration": { + "tls_host": "phish.test", + "tls_auto": false, + "tls_cert_path": ".dev/certs/self-signed/admin-public.pem", + "tls_key_path": ".dev/certs/self-signed/admin-private.pem", + "address": "0.0.0.0:8002" + }, + "phishing": { + "http": "0.0.0.0:8000", + "https": "0.0.0.0:8001" + }, + "database": { + "engine": "sqlite3", + "dsn": "file:/app/.dev/db.sqlite3" + }, + "ip_allow_list": [] +} diff --git a/backend/config.example.json b/backend/config.example.json new file mode 100644 index 0000000..b4bb70c --- /dev/null +++ b/backend/config.example.json @@ -0,0 +1,25 @@ +{ + "acme": { + "email": "" + }, + "administration": { + "tls_host": "phish.test", + "tls_auto": false, + "tls_cert_path": "certs/admin/public.pem", + "tls_key_path": "certs/admin/private.pem", + "address": "127.0.0.1:8002" + }, + "phishing": { + "http": "127.0.0.1:8000", + "https": "127.0.0.1:8001" + }, + "database": { + "engine": "sqlite3", + "dsn": "file:./db.sqlite3" + }, + "log": { + "path": "", + "errorPath": "" + }, + "ip_allow_list": [] +} diff --git a/backend/config/config.go b/backend/config/config.go new file mode 100644 index 0000000..378cbf3 --- /dev/null +++ b/backend/config/config.go @@ -0,0 +1,522 @@ +package config + +import ( + "encoding/json" + "fmt" + "io/fs" + "net" + "os" + "strconv" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/file" +) + +var ( + ErrMissingIP = errors.New("missing IP") + ErrMissingPort = errors.New("missing port") + ErrMissingDatabaseDSN = errors.New("missing database DSN") + ErrInvalidIP = errors.New("invalid IP") + ErrInvalidPort = errors.New("invalid port") + ErrInvalidDatabase = errors.New("invalid database") + ErrWriterIsNil = errors.New("writer is nil") +) + +const ( + DefaultACMEEmail = "" + DefaultDevACMEEmail = "" + + DatabaseUsePostgres = "postgres" + DefaultAdministrationUseSqlite = "sqlite3" + DefaultDatabase = DefaultAdministrationUseSqlite + DefaultAdministrationDSN = "file:./db.sqlite3" + + DefaultDevAdministrationPort = 0 // 0 uses ephemeral port, random available port + DefaultDevHTTPPhishingPort = 
8080 + DefaultDevHTTPSPhishingPort = 8443 + + DefaultProductionAdministrationPort = 0 // 0 uses ephemeral port, random available port + DefaultProductionHTTPPhishingPort = 80 + DefaultProductionHTTPSPhishingPort = 443 + + // empty is none + DefaultLogFilePath = "" + DefaultErrLogFilePath = "" + + DefaultTrustedIPHeader = "" + + DefaultAdminHost = "" + DefaultAdminAutoTLS = true + DefaultAdminAutoTLSString = "true" +) + +var ( + defaultTrustedProxies = []string{} + defaultAdminAllowed = []string{} +) + +type ( + // Config config + Config struct { + acme ACME + + tlsHost string + tlsAuto bool + + tlsCertPath string + tlsKeyPath string + + adminNetAddress net.TCPAddr + phishingHTTPNetAddress net.TCPAddr + phishingHTTPSNetAddress net.TCPAddr + database Database + fileWriter file.Writer + + LogPath string + ErrLogPath string + + IPSecurity IPSecurityConfig + } + + // ConfigDTO config DTO + ConfigDTO struct { + ACME ACME `json:"acme"` + AdministrationServer AdministrationServer `json:"administration"` + PhishingServer PhishingServer `json:"phishing"` + Database Database `json:"database"` + Log Log `json:"log"` + IPSecurity IPSecurityConfig `json:"ip_security"` + } + + Log struct { + Path string `json:"path"` + ErrorPath string `json:"errorPath"` + } + + // AdministrationServer ConfigDTO administration + AdministrationServer struct { + TLSHost string `json:"tls_host"` + TLSAuto bool `json:"tls_auto"` + TLSCertPath string `json:"tls_cert_path"` + TLSKeyPath string `json:"tls_key_path"` + Address string `json:"address"` + AllowList []string `json:"ip_allow_list"` + } + + // PhishingServer ConfigDTO phishing + PhishingServer struct { + Http string `json:"http"` + Https string `json:"https"` + } + + // Database ConfigDTO database + Database struct { + Engine string `json:"engine"` + DSN string `json:"dsn"` + } + + // ACME ConfigDTO acme + ACME struct { + Email string `json:"email"` + } +) + +type IPSecurityConfig struct { + // ip/cidr that are allowed to access the admin interface + AdminAllowed []string `json:"admin_allowed"` + + // ip/cidr of legitimate reverse proxies (e.g., Nginx, HAProxy, Cloudflare edges) + TrustedProxies []string `json:"trusted_proxies"` + + // headers to check for real client IP + // examples: CF-Connecting-IP, X-Real-IP, True-Client-IP, X-Forwarded-For + TrustedIPHeader string `json:"trusted_ip_header"` +} + +// ValidateFileWriter validates the file writer +func ValidateFileWriter(fileWriter file.Writer) error { + if fileWriter == nil { + return ErrWriterIsNil + } + return nil +} + +// NewConfig factory +func NewConfig( + acmeEmail string, + tlsHost string, + tlsAuto bool, + adminPublicCertPath string, + adminPrivateCertKey string, + adminAddress string, + phishingHTTPAddress string, + phishingHTTPSAddress string, + database Database, + fileWriter file.Writer, + logPath string, + errLogPath string, + ipSecurity IPSecurityConfig, +) (*Config, error) { + if err := ValidateFileWriter(fileWriter); err != nil { + return nil, errs.Wrap(err) + } + adminNetAddress, err := StringAddressToTCPAddr(adminAddress) + if err != nil { + return nil, errs.Wrap(err) + } + phishingHTTPNetAddress, err := StringAddressToTCPAddr(phishingHTTPAddress) + if err != nil { + return nil, errs.Wrap(err) + } + phishingHTTPSNetAddress, err := StringAddressToTCPAddr(phishingHTTPSAddress) + if err != nil { + return nil, errs.Wrap(err) + } + switch database.Engine { + case DatabaseUsePostgres: + case DefaultAdministrationUseSqlite: + default: + return nil, ErrInvalidDatabase + } + + return &Config{ + acme: 
ACME{ + Email: acmeEmail, + }, + tlsHost: tlsHost, + tlsAuto: tlsAuto, + tlsCertPath: adminPublicCertPath, + tlsKeyPath: adminPrivateCertKey, + adminNetAddress: *adminNetAddress, + phishingHTTPNetAddress: *phishingHTTPNetAddress, + phishingHTTPSNetAddress: *phishingHTTPSNetAddress, + database: Database{ + Engine: database.Engine, + DSN: database.DSN, + }, + fileWriter: fileWriter, + LogPath: logPath, + ErrLogPath: errLogPath, + IPSecurity: ipSecurity, + }, nil +} + +// NewDevDefaultConfig returns a default development config +func NewDevDefaultConfig() *Config { + tlsHost := "phish.test" + tlsAuto := false + publicCertPath := fmt.Sprintf( + "%s/%s", + data.DefaultAdminCertDir, + data.DefaultAdminPublicCertFileName, + ) + privateCertPath := fmt.Sprintf( + "%s/%s", + data.DefaultAdminCertDir, + data.DefaultAdminPrivateCertFileName, + ) + return &Config{ + acme: ACME{ + Email: DefaultACMEEmail, + }, + tlsHost: tlsHost, + tlsAuto: tlsAuto, + tlsCertPath: publicCertPath, + tlsKeyPath: privateCertPath, + adminNetAddress: net.TCPAddr{ + IP: net.IPv4(0, 0, 0, 0), + Port: DefaultDevAdministrationPort, + }, + phishingHTTPNetAddress: net.TCPAddr{ + IP: net.IPv4(0, 0, 0, 0), + Port: DefaultDevHTTPPhishingPort, + }, + phishingHTTPSNetAddress: net.TCPAddr{ + IP: net.IPv4(0, 0, 0, 0), + Port: DefaultDevHTTPSPhishingPort, + }, + database: Database{ + Engine: DefaultAdministrationUseSqlite, + DSN: DefaultAdministrationDSN, + }, + fileWriter: &file.FileWriter{}, + LogPath: DefaultLogFilePath, + ErrLogPath: DefaultErrLogFilePath, + IPSecurity: IPSecurityConfig{ + AdminAllowed: []string{}, + TrustedProxies: []string{}, + TrustedIPHeader: "", + }, + } +} + +// NewProductionDefaultConfig returns a default production config +func NewProductionDefaultConfig() *Config { + tlsHost := "localhost" + tlsAuto := DefaultAdminAutoTLS + publicCertPath := fmt.Sprintf( + "%s/%s", + data.DefaultAdminCertDir, + data.DefaultAdminPublicCertFileName, + ) + privateCertPath := fmt.Sprintf( + "%s/%s", + data.DefaultAdminCertDir, + data.DefaultAdminPrivateCertFileName, + ) + return &Config{ + acme: ACME{ + Email: DefaultACMEEmail, + }, + tlsHost: tlsHost, + tlsAuto: tlsAuto, + tlsCertPath: publicCertPath, + tlsKeyPath: privateCertPath, + adminNetAddress: net.TCPAddr{ + IP: net.IPv4(0, 0, 0, 0), + Port: DefaultProductionAdministrationPort, + }, + phishingHTTPNetAddress: net.TCPAddr{ + IP: net.IPv4(0, 0, 0, 0), + Port: DefaultProductionHTTPPhishingPort, + }, + phishingHTTPSNetAddress: net.TCPAddr{ + IP: net.IPv4(0, 0, 0, 0), + Port: DefaultProductionHTTPSPhishingPort, + }, + + database: Database{ + Engine: DefaultAdministrationUseSqlite, + DSN: DefaultAdministrationDSN, + }, + fileWriter: &file.FileWriter{}, + IPSecurity: IPSecurityConfig{ + AdminAllowed: []string{}, + TrustedProxies: []string{}, + TrustedIPHeader: "", + }, + } +} + +// ACMEEmail returns the ACME email +func (c *Config) ACMEEmail() string { + return c.acme.Email +} + +// SetACMEEmail sets the ACME email +func (c *Config) SetACMEEmail(email string) { + c.acme.Email = email +} + +// TLSHost returns the host to use for the admin server +func (c *Config) TLSHost() string { + return c.tlsHost +} + +// TLSAuto returns whether the ACME service should handle TLS for the admin server +func (c *Config) TLSAuto() bool { + return c.tlsAuto +} + +// TLSCertPath returns the cert path +func (c *Config) TLSCertPath() string { + return c.tlsCertPath +} + +// TLSKeyPath returns the private key path +func (c *Config) TLSKeyPath() string { + return c.tlsKeyPath +} + +// SetTLSHost sets the TLS host for the admin server +func (c *Config) 
SetTLSHost(host string) { + c.tlsHost = host +} + +// SetTLSAuto sets if a ACME service should handle TLS for the admin server +func (c *Config) SetTLSAuto(auto bool) { + c.tlsAuto = auto +} + +// SetAdminNetAddress sets the administration network address +func (c *Config) SetAdminNetAddress(adminNetAddress string) error { + newAddr, err := StringAddressToTCPAddr(adminNetAddress) + if err != nil { + return err + } + c.adminNetAddress = *newAddr + return nil +} + +// SetPhishingHTTPNetAddress sets the phishing network address +func (c *Config) SetPhishingHTTPNetAddress(addr string) error { + newAddr, err := StringAddressToTCPAddr(addr) + if err != nil { + return err + } + c.phishingHTTPNetAddress = *newAddr + return nil +} + +// SetPhishingHTTPNetAddress sets the phishing network address +func (c *Config) SetPhishingHTTPSNetAddress(addr string) error { + newAddr, err := StringAddressToTCPAddr(addr) + if err != nil { + return err + } + c.phishingHTTPSNetAddress = *newAddr + return nil +} + +// SetFileWriter sets the file writer +func (c *Config) SetFileWriter(fileWriter file.Writer) error { + if err := ValidateFileWriter(fileWriter); err != nil { + return fmt.Errorf("failed to set file writer on config: %w", err) + } + c.fileWriter = fileWriter + return nil +} + +// Write writes the config to a writer +func (c *Config) WriteToFile(filepath string) error { + dto := c.ToDTO() + conf, err := json.MarshalIndent(dto, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal config: %w", err) + } + + // Write the content to the writer + if _, err := c.fileWriter.Write(filepath, conf, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644); err != nil { + return fmt.Errorf("failed to write config: %w", err) + } + + return nil +} + +// StringAddressToTCPAddr converts a string address to a TCPAddr +func StringAddressToTCPAddr(address string) (*net.TCPAddr, error) { + host, port, err := net.SplitHostPort(address) + if err != nil { + return nil, errs.Wrap(err) + } + ip := net.ParseIP(host) + if ip == nil { + return nil, ErrInvalidIP + } + // convert port to int + p, err := strconv.Atoi(port) + if err != nil { + return nil, errs.Wrap(err) + } + if p < 0 || p > 65535 { + return nil, ErrInvalidPort + } + return &net.TCPAddr{ + IP: ip, + Port: p, + }, nil +} + +// FromMap creates a *Config from a DTO +func FromDTO(dto *ConfigDTO) (*Config, error) { + return NewConfig( + dto.ACME.Email, + dto.AdministrationServer.TLSHost, + dto.AdministrationServer.TLSAuto, + dto.AdministrationServer.TLSCertPath, + dto.AdministrationServer.TLSKeyPath, + dto.AdministrationServer.Address, + dto.PhishingServer.Http, + dto.PhishingServer.Https, + dto.Database, + file.FileWriter{}, + dto.Log.Path, + dto.Log.ErrorPath, + dto.IPSecurity, + ) +} + +// ToDTO converts a *Config to a *ConfigDTO +func (c *Config) ToDTO() *ConfigDTO { + allowList := make([]string, 0) + + return &ConfigDTO{ + ACME: ACME{ + Email: c.acme.Email, + }, + AdministrationServer: AdministrationServer{ + TLSHost: c.TLSHost(), + TLSAuto: c.TLSAuto(), + TLSCertPath: c.TLSCertPath(), + TLSKeyPath: c.TLSKeyPath(), + Address: c.AdminNetAddress(), + AllowList: allowList, + }, + PhishingServer: PhishingServer{ + Http: c.phishingHTTPNetAddress.String(), + Https: c.phishingHTTPSNetAddress.String(), + }, + Database: Database{ + Engine: c.database.Engine, + DSN: c.database.DSN, + }, + Log: Log{ + Path: c.LogPath, + ErrorPath: c.ErrLogPath, + }, + IPSecurity: c.IPSecurity, + } +} + +// AdminNetAddress returns the administration network address +func (c *Config) AdminNetAddress() 
string { + return c.adminNetAddress.String() +} + +// AdminNetAddressPort returns the administration network address port +func (c *Config) AdminNetAddressPort() int { + return c.adminNetAddress.Port +} + +// PhishingHTTPNetAddress returns the phishing network address +func (c *Config) PhishingHTTPNetAddress() string { + return c.phishingHTTPNetAddress.String() +} + +// PhishingHTTPNetAddressPort returns the phishing network address port +func (c *Config) PhishingHTTPNetAddressPort() int { + return c.phishingHTTPNetAddress.Port +} + +// PhishingHTTPSNetAddress returns the phishing network address +func (c *Config) PhishingHTTPSNetAddress() string { + return c.phishingHTTPSNetAddress.String() +} + +// PhishingHTTPSNetAddressPort returns the phishing network address port +func (c *Config) PhishingHTTPSNetAddressPort() int { + return c.phishingHTTPSNetAddress.Port +} + +// Database returns the database +func (c *Config) Database() Database { + return c.database +} + +// NewDTOFromFile creates a *ConfigDTO from a file +func NewDTOFromFile(filesystem fs.FS, path string) (*ConfigDTO, error) { + var conf ConfigDTO + f, err := filesystem.Open(path) + if err != nil { + return nil, errs.Wrap(err) + } + dec := json.NewDecoder(f) + err = dec.Decode(&conf) + if err != nil { + return nil, errs.Wrap(err) + } + return &conf, nil +} diff --git a/backend/config/config_test.go b/backend/config/config_test.go new file mode 100644 index 0000000..1ea94a6 --- /dev/null +++ b/backend/config/config_test.go @@ -0,0 +1,455 @@ +package config + +import ( + "encoding/json" + "fmt" + "io/fs" + "net" + "os" + "reflect" + "testing" + "testing/fstest" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/file" + "github.com/phishingclub/phishingclub/file/filemock" +) + +const ( + DEFAULT_ADMIN_ADDR = "127.0.0.1:8002" + DEFAULT_PHISHING_HTTP_ADDR = "127.0.0.1:8000" + DEFAULT_PHISHING_HTTPS_ADDR = "127.0.0.1:8001" + DEFAULT_ACME_EMAIL = "" +) + +var ( + adminHost = "phish.test" + adminTLS = false + adminPublicCertPath = fmt.Sprintf( + "%s/%s", + data.DefaultAdminCertDir, + data.DefaultAdminPublicCertFileName, + ) + adminPrivateCertPath = fmt.Sprintf( + "%s/%s", + data.DefaultAdminCertDir, + data.DefaultAdminPrivateCertFileName, + ) + + configFileOK = []byte(`{ + "administration": { + "address": "127.0.0.1:4000" + } +}`) + configFileEmpty = []byte("{") + databaseOK = Database{ + Engine: DefaultAdministrationUseSqlite, + DSN: DefaultAdministrationDSN, + } +) + +func newTestConfig() *Config { + return &Config{ + acme: ACME{ + Email: DEFAULT_ACME_EMAIL, + }, + tlsCertPath: adminPublicCertPath, + tlsKeyPath: adminPrivateCertPath, + adminNetAddress: net.TCPAddr{ + IP: net.IPv4(127, 0, 0, 1), + Port: DefaultDevAdministrationPort, + }, + phishingHTTPNetAddress: net.TCPAddr{ + IP: net.IPv4(127, 0, 0, 1), + Port: DefaultDevHTTPPhishingPort, + }, + phishingHTTPSNetAddress: net.TCPAddr{ + IP: net.IPv4(127, 0, 0, 1), + Port: DefaultDevHTTPSPhishingPort, + }, + database: databaseOK, + fileWriter: &filemock.Writer{}, + } +} + +func TestNewConfig(t *testing.T) { + t.Run("happy path", testNewConfigHappyPath) + t.Run("invalid administration address and port split", testNewConfigInvalidAdministrationAddress) + t.Run("invalid administration ip", testNewConfigInvalidAdministrationIP) + t.Run("invalid administration port", testNewConfigInvalidAdministrationPort) + t.Run("invalid administration port string", testNewConfigInvalidAdministrationPortString) + t.Run("invalid 
database", testNewConfigInvalidDatabase) + t.Run("writer with nil", testNewConfigWithNilWriter) + +} + +func testNewConfigWithNilWriter(t *testing.T) { + _, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + "127.0.0.1:8080", + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + databaseOK, + nil, + "", + "", + + IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if err == nil { + if !errors.Is(err, ErrWriterIsNil) { + t.Error("expected ErrWriterIsNil error from nil writer") + } + t.Error("expected error from nil writer") + return + } +} + +func testNewConfigInvalidAdministrationAddress(t *testing.T) { + _, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + "foobar", + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + databaseOK, + &filemock.Writer{}, + "", + "", + IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if err == nil { + t.Error("expected error from invalid address") + return + } +} + +func testNewConfigInvalidAdministrationIP(t *testing.T) { + _, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + "999.00.999.999:1234", + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + databaseOK, + &filemock.Writer{}, + "", + "", + IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if !errors.Is(err, ErrInvalidIP) { + t.Error(err) + return + } +} + +func testNewConfigHappyPath(t *testing.T) { + addr := "127.0.0.1:1234" + c, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + addr, + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + databaseOK, + &filemock.Writer{}, + "", + "", + IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if err != nil { + t.Error(err) + return + } + if c.AdminNetAddress() != addr { + t.Errorf("expected %s but got %s", addr, c.AdminNetAddress()) + return + } + if c.database.DSN != databaseOK.DSN { + t.Errorf("expected %s but got %s", databaseOK.DSN, c.database.DSN) + return + } + if c.database.Engine != databaseOK.Engine { + t.Errorf("expected %s but got %s", databaseOK.Engine, c.database.Engine) + return + } +} + +func testNewConfigInvalidAdministrationPort(t *testing.T) { + _, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + "127.0.0.1:-1", + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + databaseOK, + &filemock.Writer{}, + "", + "", + IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if !errors.Is(err, ErrInvalidPort) { + t.Error(err) + return + } +} + +func testNewConfigInvalidAdministrationPortString(t *testing.T) { + _, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + "127.0.0.1:999999999999999999999999999999999999999999", + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + databaseOK, + &filemock.Writer{}, + "", + "", + 
IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if err == nil { + t.Error("expected error from invalid string port") + return + } +} + +func testNewConfigInvalidDatabase(t *testing.T) { + _, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + "127.0.0.1:1234", + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + Database{ + Engine: "foobar", + DSN: "file:./data.db?cache=shared&mode=rwc&_fk=1", + }, &filemock.Writer{}, + "", + "", + IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if err == nil { + t.Errorf("expected %s but got %s", ErrInvalidDatabase, err) + return + } +} + +func TestSetFileWriter(t *testing.T) { + t.Run("happypath", func(t *testing.T) { + c := newTestConfig() + err := c.SetFileWriter(&filemock.Writer{}) + if err != nil { + t.Error(err) + return + } + }) + t.Run("nil writer", func(t *testing.T) { + c := newTestConfig() + err := c.SetFileWriter(nil) + if err == nil { + if !errors.Is(err, ErrWriterIsNil) { + t.Error("expected ErrWriterIsNil error from nil writer") + } + t.Error("expected error from nil writer") + return + } + }) +} + +func TestWriteToFile(t *testing.T) { + filepath := "./testFile" + c := newTestConfig() + m := filemock.Writer{} + dto := c.ToDTO() + conf, err := json.MarshalIndent(dto, "", " ") + if err != nil { + t.Error(err) + return + } + m. + On("Write", filepath, conf, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.FileMode(0644)). + Return(0, nil) + err = c.SetFileWriter(&m) + if err != nil { + t.Error(err) + return + } + err = c.WriteToFile(filepath) + if err != nil { + t.Error(err) + return + } +} + +func TestToDTO(t *testing.T) { + addr := "127.0.0.1:1234" + c, err := NewConfig( + DEFAULT_ACME_EMAIL, + adminHost, + adminTLS, + adminPublicCertPath, + adminPrivateCertPath, + addr, + DEFAULT_PHISHING_HTTP_ADDR, + DEFAULT_PHISHING_HTTPS_ADDR, + databaseOK, + &filemock.Writer{}, + "", + "", + IPSecurityConfig{ + AdminAllowed: defaultAdminAllowed, + TrustedProxies: defaultTrustedProxies, + TrustedIPHeader: DefaultTrustedIPHeader, + }, + ) + if err != nil { + t.Error(err) + return + } + dto := c.ToDTO() + if dto.AdministrationServer.Address != addr { + t.Errorf("expected %s but got %s", addr, dto.AdministrationServer.Address) + return + } +} + +func TestNewDTOFromFile(t *testing.T) { + + t.Run("happypath", testNewDTOFromFileHappyPath) + t.Run("file error", testNewDTOFromFileFileError) + t.Run("bad content", testNewDTOFromFileBadContent) +} + +func testNewDTOFromFileHappyPath(t *testing.T) { + filesystem := fstest.MapFS{} + path := "config.json" + filesystem[path] = &fstest.MapFile{ + Data: configFileOK, + } + dto, err := NewDTOFromFile(filesystem, path) + if err != nil { + t.Error(err) + return + } + if dto.AdministrationServer.Address != "127.0.0.1:4000" { + t.Errorf("Expected %s Got %s", "127.0.0.1:4000", dto.AdministrationServer.Address) + return + } +} + +func testNewDTOFromFileFileError(t *testing.T) { + filesystem := fstest.MapFS{} + path := "config.json" + _, err := NewDTOFromFile(filesystem, path) + if !errors.Is(err, fs.ErrNotExist) { + t.Errorf("expected %s but got %s", fs.ErrNotExist, err) + return + } +} + +func testNewDTOFromFileBadContent(t *testing.T) { + filesystem := fstest.MapFS{} + path := "config.json" + filesystem[path] = &fstest.MapFile{ + Data: configFileEmpty, + } + _, 
err := NewDTOFromFile(filesystem, path) + if err == nil { + t.Error("expected error from invalid file contents") + return + } +} + +func TestNewDefaultConfig(t *testing.T) { + tests := []struct { + name string + want *Config + }{ + { + name: "happypath", + want: &Config{ + tlsCertPath: adminPublicCertPath, + tlsKeyPath: adminPrivateCertPath, + adminNetAddress: net.TCPAddr{ + IP: net.IPv4(127, 0, 0, 1), + Port: DefaultDevAdministrationPort, + }, + database: Database{ + Engine: DefaultAdministrationUseSqlite, + DSN: DefaultAdministrationDSN, + }, + fileWriter: &file.FileWriter{}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := NewDevDefaultConfig(); !reflect.DeepEqual(got, tt.want) { + t.Errorf("NewDefaultConfig() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestConfig_Database(t *testing.T) { + t.Run("happypath", func(t *testing.T) { + c := newTestConfig() + if !reflect.DeepEqual(c.Database(), databaseOK) { + t.Errorf("expected %v but got %v", databaseOK, c.Database()) + return + } + }) +} diff --git a/backend/controller/allowDeny.go b/backend/controller/allowDeny.go new file mode 100644 index 0000000..1b89a74 --- /dev/null +++ b/backend/controller/allowDeny.go @@ -0,0 +1,200 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// AllowDenyColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var AllowDenyColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.ALLOW_DENY_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.ALLOW_DENY_TABLE, "updated_at"), + "hosting_website": repository.TableColumn(database.ALLOW_DENY_TABLE, "host_website"), + "redirects": repository.TableColumn(database.ALLOW_DENY_TABLE, "redirect_url"), +} + +// AllowDeny is a controller +type AllowDeny struct { + Common + AllowDenyService *service.AllowDeny +} + +// Create creates a new AllowDeny +func (c *AllowDeny) Create(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var req model.AllowDeny + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // save + id, err := c.AllowDenyService.Create(g, session, &req) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{ + "id": id.String(), + }, + ) +} + +// GetAll gets AllowDenies +func (c *AllowDeny) GetAll(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByName() + companyID := companyIDFromRequestQuery(g) + // get + allowDenies, err := c.AllowDenyService.GetAll( + g, + session, + companyID, + &repository.AllowDenyOption{ + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + allowDenies, + ) +} + +// GetAllOverview gets AllowDenies +func (c *AllowDeny) GetAllOverview(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByName() + companyID := 
companyIDFromRequestQuery(g) + allowDenies, err := c.AllowDenyService.GetAll( + g, + session, + companyID, + &repository.AllowDenyOption{ + Fields: []string{ + "id", + "created_at", + "updated_at", + "company_id", + "name", + "allowed", + }, + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + allowDenies, + ) +} + +// GetByID gets an AllowDeny by ID +func (c *AllowDeny) GetByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get + allowDeny, err := c.AllowDenyService.GetByID( + g, + session, + id, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + allowDeny, + ) +} + +// UpdateByID updates an AllowDeny +func (c *AllowDeny) UpdateByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var req model.AllowDeny + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + if ok := c.handleParseRequest(g, &req); !ok { + + return + } + // update + err := c.AllowDenyService.Update(g, session, id, &req) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + nil, + ) +} + +// DeleteByID deletes an AllowDeny +func (c *AllowDeny) DeleteByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // delete + err := c.AllowDenyService.DeleteByID(g, session, id) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + nil, + ) +} diff --git a/backend/controller/apiSender.go b/backend/controller/apiSender.go new file mode 100644 index 0000000..88394bc --- /dev/null +++ b/backend/controller/apiSender.go @@ -0,0 +1,196 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// APISenderColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var APISenderColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.API_SENDER_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.API_SENDER_TABLE, "updated_at"), + "name": repository.TableColumn(database.API_SENDER_TABLE, "name"), +} + +// APISender is a API sender controller +type APISender struct { + Common + APISenderService *service.APISender +} + +// Create creates a new api sender +func (a *APISender) Create(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + var req model.APISender + if ok := a.handleParseRequest(g, &req); !ok { + return + } + id, err := a.APISenderService.Create(g, session, &req) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{"id": id.String()}) +} + +// GetAll gets all api senders +func (a *APISender) GetAll(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := a.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(APISenderColumnsMap) + 
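The column maps declared at the top of these controllers exist so that user-supplied sort keys never reach SQL directly; RemapOrderBy (its implementation is not shown here) presumably translates a friendly key into the whitelisted, fully qualified column. A simplified sketch of that idea, not the project's implementation:

// remapOrderBy translates a user-facing sort key through a whitelist map;
// anything unknown falls back to a safe default ordering instead of reaching
// the ORDER BY clause.
func remapOrderBy(requested string, columns map[string]string, fallback string) string {
	if column, ok := columns[requested]; ok {
		return column
	}
	return fallback
}

// e.g. remapOrderBy("name", APISenderColumnsMap, APISenderColumnsMap["updated_at"])
// yields the qualified "name" column, while an unknown key such as "password"
// is ignored and the default ordering is used.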
apiSenders, err := a.APISenderService.GetAll( + g.Request.Context(), + session, + companyID, + repository.APISenderOption{ + QueryArgs: queryArgs, + }, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, apiSenders) +} + +// GetAllOverview gets all api senders with limited data +func (a *APISender) GetAllOverview(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := a.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(APISenderColumnsMap) + apiSenders, err := a.APISenderService.GetAllOverview( + g.Request.Context(), + session, + companyID, + repository.APISenderOption{ + QueryArgs: queryArgs, + }, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, apiSenders) +} + +// GetByID gets a api sender by ID +func (a *APISender) GetByID(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse reqeuest + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // get api sender + apiSender, err := a.APISenderService.GetByID( + g, + session, + id, + &repository.APISenderOption{}, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, apiSender) +} + +// Update updates a api sender +func (a *APISender) UpdateByID(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + var req model.APISender + if ok := a.handleParseRequest(g, &req); !ok { + return + } + err := a.APISenderService.UpdateByID( + g, + session, + id, + &req, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{}) +} + +// DeletebyID deletes a api sender by ID +func (a *APISender) DeleteByID(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + err := a.APISenderService.DeleteByID( + g.Request.Context(), + session, + id, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{}) +} + +// SendTest sends a api request test and outputs the api sender and response +func (a *APISender) SendTest(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + data, err := a.APISenderService.SendTest( + g.Request.Context(), + session, + id, + ) + // output the error + if err != nil { + a.Response.BadRequestMessage(g, err.Error()) + return + } + a.Response.OK(g, data) +} diff --git a/backend/controller/asset.go b/backend/controller/asset.go new file mode 100644 index 0000000..3636490 --- /dev/null +++ b/backend/controller/asset.go @@ -0,0 +1,487 @@ +package controller + +import ( + "encoding/base64" + "fmt" + "io/fs" + "net/http" + "net/url" + "os" + "path/filepath" + + "github.com/go-errors/errors" + + securejoin "github.com/cyphar/filepath-securejoin" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" + 
"github.com/phishingclub/phishingclub/vo" +) + +// AssetOrderByMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var AssetsColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.ASSET_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.ASSET_TABLE, "updated_at"), + "name": repository.TableColumn(database.ASSET_TABLE, "name"), + "description": repository.TableColumn(database.ASSET_TABLE, "description"), + "path": repository.TableColumn(database.ASSET_TABLE, "path"), +} + +// Asset is an static Asset controller +type Asset struct { + Common + StaticAssetPath string + DomainService *service.Domain + OptionService *service.Option + AssetService *service.Asset +} + +// GetContentByID get the content and mime type of an asset +func (a *Asset) GetContentByID(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // check permissions + isAuthorized, err := service.IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + _ = handleServerError(g, a.Response, err) + return + } + if !isAuthorized { + a.Response.Unauthorized(g) + return + } + // get domain + domain, err := vo.NewString255(g.Param("domain")) + if err != nil { + a.Logger.Errorw("invalid domain", + "domain", domain, + ) + a.Response.ValidationFailed(g, "Domain", err) + return + } + // if the target is the global folder, use the global folder + if domain.String() == data.ASSET_GLOBAL_FOLDER { + // TODO this shold require special permissions or be prefixed with a special path + // such as the company name or something that is prefixed + _ = data.ASSET_GLOBAL_FOLDER + } + staticPath, err := securejoin.SecureJoin(a.StaticAssetPath, domain.String()) + if err != nil { + a.Logger.Debugw("insecure path", + "path", a.StaticAssetPath, + "domain", domain.String(), + "error", err, + ) + return + } + // get the file path + pathDecoded, err := url.QueryUnescape(g.Param("path")) + if err != nil { + a.Logger.Debugw("failed to decode path", + "error", err, + ) + a.Response.BadRequest(g) + return + } + + filePath, err := securejoin.SecureJoin(staticPath, pathDecoded) + if err != nil { + a.Logger.Debugw("insecure path", + "path", pathDecoded, + "error", err, + ) + a.Response.BadRequest(g) + return + } + // check if the file exists + a.Logger.Debugw("checking if asset exists", + "path", filePath, + ) + _, err = os.Stat(filePath) + if errors.Is(err, fs.ErrNotExist) { + a.Logger.Debugw("asset not found", + "path", filePath, + ) + a.Response.NotFound(g) + return + } + if err != nil { + a.Logger.Errorw("failed to get asset path info", + "path", filePath, + "error", err, + ) + a.Response.ServerError(g) + return + } + // serve the file + // #nosec + content, err := os.ReadFile(filePath) + if err != nil { + a.Logger.Errorw("failed to read asset", + "path", filePath, + "error", err, + ) + a.Response.ServerError(g) + return + } + + fileExt := filepath.Ext(filePath) + mimeType := "" + switch fileExt { + case ".html": + mimeType = "text/html" + case ".htm": + mimeType = "text/html" + case ".xhtml": + mimeType = "application/xhtml+xml" + default: + mimeType = http.DetectContentType(content) + } + encodedContent := base64.StdEncoding.EncodeToString(content) + + a.Response.OK(g, gin.H{ + "mimeType": mimeType, + "file": encodedContent, + }) +} + +// GetAllForContext 
gets all static assets for a domain +// and has a special case 'shared' to get all global assets +func (a *Asset) GetAllForContext(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // check permissions + isAuthorized, err := service.IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + _ = handleServerError(g, a.Response, err) + return + } + if !isAuthorized { + a.Response.Unauthorized(g) + return + } + // parse request + var domainID *uuid.UUID + companyID := companyIDFromRequestQuery(g) + domainParam := g.Param("domain") + queryArgs, ok := a.handleQueryArgs(g) + if !ok { + return + } + // set default sort by + queryArgs.RemapOrderBy(AssetsColumnsMap) + queryArgs.DefaultSortByUpdatedAt() + a.Logger.Debugw("getting assets for domain", + "domain", domainParam, + "companyID", companyID, + ) + // if there is no domain then it is a global asset request + // else the domain name is the asset scope + if len(domainParam) > 0 { + domainName, err := vo.NewString255(domainParam) + if err != nil { + a.Logger.Errorw("invalid domain", + "domain", domainName, + ) + a.Response.ValidationFailed(g, "Domain", err) + return + } + // get the domains id and also check if the user has permission to retrieve it + domain, err := a.DomainService.GetByName( + g.Request.Context(), + session, + domainName, + &repository.DomainOption{}, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + did := domain.ID.MustGet() + domainID = &did + } + // get assets + a.Logger.Debugw("getting assets for domain by ID", + "domainID", domainID, + ) + assets, err := a.AssetService.GetAll( + g, + session, + domainID, + companyID, + queryArgs, + ) + // handle responses + a.handleErrors(g, err) + a.Response.OK(g, assets) +} + +// Create uploads an static asset +func (a *Asset) Create(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // this is a form data request, so we must handle all fields manually as is it not parsed from the struct + multipartData, err := g.MultipartForm() + if err != nil { + a.Logger.Errorw("failed to get multipart form", + "error", err, + ) + a.Response.BadRequest(g) + return + } + if len(multipartData.File["files"]) == 0 { + a.Logger.Debug("no files to upload") + a.Response.BadRequestMessage(g, "No files selected") + return + } + contextParam := g.PostForm("domain") + // if no domain is set, use the global folder + var domain *model.Domain + // if a domain is supplied we look for its assets + if len(contextParam) > 0 { + // check that the domain exists + name, err := vo.NewString255(contextParam) + if err != nil { + a.Logger.Errorw("invalid domain name", + "error", err, + ) + a.Response.ValidationFailed(g, "Domain", err) + return + } + d, err := a.DomainService.GetByName( + g, + session, + name, + &repository.DomainOption{}, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + domain = d + a.Logger.Debugw("uploading assets to domain", + "domain", contextParam, + ) + } else { + a.Logger.Debug("uploading shared assets") + } + // map files to assets + assets := []*model.Asset{} + for _, file := range multipartData.File["files"] { + // check max file size + maxFile, err := a.OptionService.GetOption(g, session, data.OptionKeyMaxFileUploadSizeMB) + if ok := a.handleErrors(g, err); !ok { + return + } + ok, err := utils.CompareFileSizeFromString(file.Size, maxFile.Value.String()) + if err != nil { + a.Logger.Errorw("failed to compare file 
size", + "error", err, + ) + } + if !ok { + a.Logger.Debugw("file too large", + "filename", file.Filename, + "size", file.Size, + "maxSize", maxFile.Value.String(), + ) + a.Response.ValidationFailed( + g, + "File", + fmt.Errorf("file '%s' is too large", utils.ReadableFileName(file.Filename)), + ) + return + } + // TODO multi user validate that the company id is the same as the session company id or that the session is a super admin + // TODO can the creation of the ID be moved to the repo + var domainID string + if domain != nil { + did := domain.ID.MustGet() + domainID = did.String() + } + name, err := vo.NewOptionalString127(g.Request.PostFormValue("name")) + if err != nil { + a.Logger.Debugw("failed to parse name", + "error", err, + ) + a.Response.ValidationFailed(g, "Name", err) + return + } + description, err := vo.NewOptionalString255(g.Request.PostFormValue("description")) + if err != nil { + a.Logger.Debugw("failed to parse description", + "error", err, + ) + a.Response.ValidationFailed(g, "Description", err) + return + } + path, err := vo.NewRelativeFilePath(g.Request.PostFormValue("path")) + if err != nil { + a.Logger.Debugw("failed to parse path", + "error", err, + ) + a.Response.ValidationFailed(g, "Path", err) + return + } + companyID := nullable.NewNullNullable[uuid.UUID]() + companyIDParam := g.PostForm("companyID") + if len(companyIDParam) > 0 { + cid, err := uuid.Parse(companyIDParam) + if err != nil { + a.Logger.Debugw("failed to parse company id", + "error", err, + ) + a.Response.ValidationFailed(g, "CompanyID", err) + } + companyID.Set(cid) + } else { + companyID.SetNull() + } + + assetName := nullable.NewNullableWithValue(*name) + assetDescription := nullable.NewNullableWithValue(*description) + assetPath := nullable.NewNullableWithValue(*path) + assetDomainID := nullable.NewNullNullable[uuid.UUID]() + if len(domainID) > 0 { + did, err := uuid.Parse(domainID) + if err != nil { + a.Logger.Debugw("failed to parse domain id", + "error", err, + ) + a.Response.ValidationFailed(g, "DomainID", err) + return + } + assetDomainID.Set(did) + // if the asset belongs to a domain it must not be 'global' context + if !companyID.IsSpecified() { + a.Logger.Debugw( + "cant add a shared asset to a company owned domain", + "domainID", domainID, + "domainOwnerCompanyID", companyID, + ) + a.Response.ValidationFailed( + g, + "domainID", + errors.New("cant add a shared asset to a company owned domain"), + ) + return + } + } + asset := model.Asset{ + Name: assetName, + Description: assetDescription, + Path: assetPath, + File: *file, + DomainID: assetDomainID, + CompanyID: companyID, + } + if domain != nil { + asset.DomainName = domain.Name + } + assets = append(assets, &asset) + } + // store the files on disk and in database + ids, err := a.AssetService.Create(g, session, assets) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{ + "ids": ids, + "files_uploaded": len(assets), + }) +} + +// GetByID gets an static asset by id +func (a *Asset) GetByID(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // get the asset + ctx := g.Request.Context() + asset, err := a.AssetService.GetByID(ctx, session, id) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, asset) +} + +// UpdateByID updates an static asset by id +func (a *Asset) UpdateByID(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if 
!ok { + return + } + // parse request + var req model.Asset + if ok := a.handleParseRequest(g, &req); !ok { + return + } + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // update the asset + ctx := g.Request.Context() + err := a.AssetService.UpdateByID( + ctx, + session, + id, + req.Name, + req.Description, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{}) +} + +// RemoveByID removes an static asset +// if the asset is a directory, it will be removed recursively +func (a *Asset) RemoveByID(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // remove the asset + ctx := g.Request.Context() + err := a.AssetService.DeleteByID( + ctx, + session, + id, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/attachment.go b/backend/controller/attachment.go new file mode 100644 index 0000000..67ba0b0 --- /dev/null +++ b/backend/controller/attachment.go @@ -0,0 +1,404 @@ +package controller + +import ( + "encoding/base64" + "fmt" + "net/http" + "os" + "path/filepath" + "strings" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" +) + +// AttachmentColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var AttachmentColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.ATTACHMENT_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.ATTACHMENT_TABLE, "updated_at"), + "name": repository.TableColumn(database.ATTACHMENT_TABLE, "name"), + "description": repository.TableColumn(database.ATTACHMENT_TABLE, "description"), + "embedded content": repository.TableColumn(database.ATTACHMENT_TABLE, "embeddedContent"), + "filename": repository.TableColumn(database.ATTACHMENT_TABLE, "filename"), +} + +// Attachment is an static Attachment controller +type Attachment struct { + Common + StaticAttachmentPath string + TemplateService *service.Template + AttachmentService *service.Attachment + OptionService *service.Option + CompanyService *service.Company +} + +// GetContentByID returns the content and mime type of an attachment +func (a *Attachment) GetContentByID(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // get the attachment + ctx := g.Request.Context() + attachment, err := a.AttachmentService.GetByID( + ctx, + session, + id, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + p := attachment.Path.MustGet().String() + // serve the file + // #nosec + content, err := os.ReadFile(p) + if err != nil { + a.Logger.Errorw("failed to read file", + "path", p, + "error", err, + ) + a.Response.ServerError(g) + return + } + + fileExt := filepath.Ext(p) + mimeType := "" + switch fileExt { + case ".html": + mimeType 
= "text/html" + case ".htm": + mimeType = "text/html" + case ".xhtml": + mimeType = "application/xhtml+xml" + default: + mimeType = http.DetectContentType(content) + } + // get by id is only used for admin viewing of an attachemnt, so all + // embedded content must contain example data + if attachment.EmbeddedContent.MustGet() { + // build email + domain := &model.Domain{ + Name: nullable.NewNullableWithValue( + *vo.NewString255Must("example.test"), + ), + } + recipient := model.NewRecipientExample() + campaignRecipient := model.CampaignRecipient{ + ID: nullable.NewNullableWithValue( + uuid.New(), + ), + Recipient: recipient, + } + email := model.NewEmailExample() + // hacky + email.Content = nullable.NewNullableWithValue( + *vo.NewUnsafeOptionalString1MB(string(content)), + ) + apiSender := model.NewAPISenderExample() + b, err := a.TemplateService.CreateMailBody( + "id", + "/foo", + domain, + &campaignRecipient, + email, + apiSender, + ) + if err != nil { + a.Logger.Errorw("failed to appy template to attachment", + "error", err, + ) + a.Response.ServerError(g) + return + } + content = []byte(b) + } + a.Response.OK(g, gin.H{ + "mimeType": mimeType, + "file": base64.StdEncoding.EncodeToString(content), + }) +} + +// GetAllForContext gets all attachments for a domain +// and has a special case 'shared' to get all global attachments +func (a *Attachment) GetAllForContext(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + // check permissions + isAuthorized, err := service.IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.Logger.Errorw("failed to check permissions", + "error", err, + ) + a.Response.ServerError(g) + return + } + if !isAuthorized { + // TODO audit log + _ = handleAuthorizationError(g, a.Response, errs.ErrAuthorizationFailed) + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + // if there is no companyID then it is a global attachment request + // else the company context name is the attachment scope + if companyID != nil { + // get the company id and to check if the user has permission to retrieve it + _, err := a.CompanyService.GetByID( + g.Request.Context(), + session, + companyID, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + } + queryArgs, ok := a.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(AttachmentColumnsMap) + // get attachments + a.Logger.Debugw("getting attachments for company ID", + "companyID", companyID, + ) + attachments, err := a.AttachmentService.GetAll( + g, + session, + companyID, + queryArgs, + ) + // handle responses + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, attachments) +} + +// Create uploads an attachment +func (a *Attachment) Create(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + multipartData, err := g.MultipartForm() + if err != nil { + a.Logger.Errorw("failed to get multipart form", + "error", err, + ) + a.Response.BadRequest(g) + return + } + if len(multipartData.File["files"]) == 0 { + a.Logger.Debug("no files to upload") + a.Response.BadRequestMessage(g, "No files selected") + return + } + companyID := nullable.NewNullNullable[uuid.UUID]() + companyIDParam := g.PostForm("companyID") + if len(companyIDParam) > 0 { + cid, err := uuid.Parse(companyIDParam) + if err != nil { + a.Logger.Debugw("failed to parse company id", + "error", err, + ) + 
a.Response.ValidationFailed(g, "companyID", err) + return + } + companyID.Set(cid) + } + nameParam, err := vo.NewOptionalString127(g.PostForm("name")) + if err != nil { + a.Logger.Debugw("failed to parse name", + "name", g.PostForm("name"), + "error", err, + ) + a.Response.ValidationFailed(g, "name", err) + return + } + name := nullable.NewNullableWithValue(*nameParam) + descriptionParam, err := vo.NewOptionalString255(g.PostForm("description")) + if err != nil { + a.Logger.Debugw("failed to parse description", + "error", err, + ) + a.Response.ValidationFailed(g, "description", err) + return + } + description := nullable.NewNullableWithValue(*descriptionParam) + embeddedContent := nullable.NewNullableWithValue(false) + embeddedContentString := g.PostForm("embeddedContent") + if strings.ToLower(embeddedContentString) == "true" { + embeddedContent.Set(true) + } + attachments := []*model.Attachment{} + for _, file := range multipartData.File["files"] { + // TODO multi user validate that the company id is the same as the session company id or that the session is a super admin + // check max file size + maxFile, err := a.OptionService.GetOption(g, session, data.OptionKeyMaxFileUploadSizeMB) + if ok := a.handleErrors(g, err); !ok { + return + } + ok, err := utils.CompareFileSizeFromString(file.Size, maxFile.Value.String()) + if err != nil { + a.Logger.Errorw("failed to compare file size", + "error", err, + ) + } + if !ok { + a.Logger.Debugw("file too large", + "filename", file.Filename, + "size", file.Size, + "maxSize", maxFile.Value.String(), + ) + a.Response.ValidationFailed( + g, + "File", + fmt.Errorf("'%s' is too large", utils.ReadableFileName(file.Filename)), + ) + return + } + fileNameParam, err := vo.NewFileName(file.Filename) + if err != nil { + a.Logger.Debugw("failed to parse filename", + "error", err, + ) + a.Response.ValidationFailed(g, "filename", err) + return + } + fileName := nullable.NewNullableWithValue(*fileNameParam) + + attachment := model.Attachment{ + CompanyID: companyID, + Name: name, + Description: description, + EmbeddedContent: embeddedContent, + File: file, + FileName: fileName, + } + if err := attachment.Validate(); err != nil { + a.Logger.Debugw("failed to validate attachment", + "attachmentName", name, + "error", err, + ) + a.Response.ValidationFailed(g, "attachment", err) + return + } + attachments = append(attachments, &attachment) + } + // store the files on disk and in database + createdIDs, err := a.AttachmentService.Create( + g, + session, + attachments, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{ + "ids": createdIDs, + "files_uploaded": len(attachments), + }) +} + +// GetByID gets an static attachment by id +func (a *Attachment) GetByID(g *gin.Context) { + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // get the attachment + ctx := g.Request.Context() + attachment, err := a.AttachmentService.GetByID( + ctx, + session, + id, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, attachment) +} + +// UpdateByID updates an static attachment by id +func (a *Attachment) UpdateByID(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // parse request + var req model.Attachment + if ok := a.handleParseRequest(g, &req); !ok { + return + } + // update the attachment + ctx := g.Request.Context() + err 
:= a.AttachmentService.UpdateByID( + ctx, + session, + id, + &req, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{}) +} + +// RemoveByID removes an static attachment +// if the attachment is a directory, it will be removed recursively +func (a *Attachment) RemoveByID(g *gin.Context) { + // handle session + session, _, ok := a.handleSession(g) + if !ok { + return + } + // parse request + id, ok := a.handleParseIDParam(g) + if !ok { + return + } + // remove the attachment + ctx := g.Request.Context() + err := a.AttachmentService.DeleteByID( + ctx, + session, + id, + ) + if ok := a.handleErrors(g, err); !ok { + return + } + a.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/campaign.go b/backend/controller/campaign.go new file mode 100644 index 0000000..61f58ec --- /dev/null +++ b/backend/controller/campaign.go @@ -0,0 +1,927 @@ +package controller + +import ( + "bytes" + "encoding/csv" + "time" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/embedded" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" +) + +// allowedCampaignColumns is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var allowedCampaignColumns = map[string]string{ + "created_at": repository.TableColumn(database.CAMPAIGN_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.CAMPAIGN_TABLE, "updated_at"), + "closed_at": repository.TableColumn(database.CAMPAIGN_TABLE, "closed_at"), + "close_at": repository.TableColumn(database.CAMPAIGN_TABLE, "close_at"), + "anonymized_at": repository.TableColumn(database.CAMPAIGN_TABLE, "anonymized_at"), + "is_test": repository.TableColumn(database.CAMPAIGN_TABLE, "is_test"), + "send_start_at": repository.TableColumn(database.CAMPAIGN_TABLE, "send_start_at"), + "send_end_at": repository.TableColumn(database.CAMPAIGN_TABLE, "send_end_at"), + "template": repository.TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "name"), + "name": repository.TableColumn(database.CAMPAIGN_TABLE, "name"), +} + +// campaignEventColumns is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var campaignEventColumns = map[string]string{ + "created_at": repository.TableColumn(database.CAMPAIGN_EVENT_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.CAMPAIGN_EVENT_TABLE, "updated_at"), + "details": repository.TableColumn(database.CAMPAIGN_EVENT_TABLE, "data"), + "ip": repository.TableColumn(database.CAMPAIGN_EVENT_TABLE, "ip_address"), + "user-agent": repository.TableColumn(database.CAMPAIGN_EVENT_TABLE, "user_agent"), + "email": repository.TableColumn(database.RECIPIENT_TABLE, "email"), + "first_name": repository.TableColumn(database.RECIPIENT_TABLE, "first_name"), + "last_name": repository.TableColumn(database.RECIPIENT_TABLE, "last_name"), + "event": 
repository.TableColumn(database.EVENT_TABLE, "name"), +} + +// allowedCampaignRecipientColumns is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var allowedCampaignRecipientColumns = map[string]string{ + "created_at": "campaign_recipients.created_at", + "updated_at": "campaign_recipients.updated_at", + "send_at": "campaign_recipients.send_at", + "sent_at": "campaign_recipients.sent_at", + "cancelled_at": "campaign_recipients.cancelled_at", + "status": "campaign_recipients.notable_event_id", + "first_name": "recipients.first_name", + "last_name": "recipients.last_name", + "email": "recipients.email", +} + +// Campaign is a Campaign controller +type Campaign struct { + Common + CampaignService *service.Campaign +} + +// CloseCampaignByID closes campaign +func (c *Campaign) CloseCampaignByID(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // close campaigns + err := c.CampaignService.CloseCampaignByID( + g.Request.Context(), + session, + id, + ) + // handle responses + if errors.Is(err, errs.ErrCampaignAlreadyClosed) { + c.Response.ValidationFailed(g, "", err) + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// Create creates a new campaign +func (c *Campaign) Create(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req model.Campaign + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // create and schedule the campaign + id, err := c.CampaignService.Create(g.Request.Context(), session, &req) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{ + "id": id.String(), + }) +} + +// GetAllEventTypes gets all event types +func (c *Campaign) GetAllEventTypes(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // check permissions + isAuthorized, err := service.IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + _ = handleServerError(g, c.Response, err) + return + } + if !isAuthorized { + c.Response.Unauthorized(g) + return + } + // get all event names + // we pick them out from the in memory cache + ev := []gin.H{} + for name, id := range cache.EventIDByName { + ev = append(ev, gin.H{ + "id": id, + "name": name, + }) + } + c.Response.OK(g, ev) +} + +// GetByID gets a campaign by its id +func (c *Campaign) GetByID(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get the campaign that needs to be updated + campaign, err := c.CampaignService.GetByID( + g.Request.Context(), + session, + id, + &repository.CampaignOption{ + WithRecipientGroups: true, + WithAllowDeny: true, + WithDenyPage: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaign) +} + +// GetByName gets a campaign by name +func (c *Campaign) GetByName(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + name := g.Param("name") + if !ok { + return + } + // get the campaign that needs to be 
updated + campaign, err := c.CampaignService.GetByName( + g, + session, + name, + companyID, + &repository.CampaignOption{ + WithRecipientGroups: true, + WithAllowDeny: true, + WithDenyPage: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaign) +} + +// GetResultStats get campaign result stats +func (c *Campaign) GetResultStats(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get + stats, err := c.CampaignService.GetResultStats( + g.Request.Context(), + session, + id, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, stats) +} + +// GetCampaignStats get campaign stats +// if no company id is provided it gets the global stats including all companies +func (c *Campaign) GetStats(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + // get + stats, err := c.CampaignService.GetStats( + g.Request.Context(), + session, + companyID, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, stats) +} + +// GetAll gets all campaigns with pagination +func (c *Campaign) GetAll(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.RemapOrderBy(allowedCampaignColumns) + queryArgs.DefaultSortByUpdatedAt() + // get all campaigns + campaigns, err := c.CampaignService.GetAll( + g.Request.Context(), + session, + companyID, + &repository.CampaignOption{ + QueryArgs: queryArgs, + WithCampaignTemplate: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaigns) + +} + +// GetAll gets all campaigns within dates +func (c *Campaign) GetAllWithinDates(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.RemapOrderBy(allowedCampaignColumns) + queryArgs.DefaultSortByUpdatedAt() + // get start and end date for query + startDate, err := time.Parse(time.RFC3339Nano, g.Query("start")) + if err != nil { + c.Response.ValidationFailed(g, "start", err) + return + } + endDate, err := time.Parse(time.RFC3339Nano, g.Query("end")) + if err != nil { + c.Response.ValidationFailed(g, "end", err) + return + } + // get all campaigns + campaigns, err := c.CampaignService.GetAllWithinDates( + g.Request.Context(), + session, + startDate, + endDate, + companyID, + &repository.CampaignOption{ + QueryArgs: queryArgs, + WithCampaignTemplate: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaigns) + +} + +// GetAllActive gets all active campaigns with pagination +// if no company id is given it gets all globals including company +func (c *Campaign) GetAllActive(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.RemapOrderBy(allowedCampaignColumns) + if queryArgs.OrderBy == "" { + queryArgs.OrderBy = "send_start_at" + queryArgs.Desc = false + } + // get all campaigns + 
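+	// with no explicit orderBy, active campaigns are sorted ascending by send_start_at (set above), so the soonest-starting campaigns come first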
campaigns, err := c.CampaignService.GetAllActive( + g.Request.Context(), + session, + companyID, + &repository.CampaignOption{ + QueryArgs: queryArgs, + WithCompany: true, + WithCampaignTemplate: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaigns) +} + +// GetAllUpcoming gets all upcoming campaigns with pagination +// if no company id is given it gets all globals including company +func (c *Campaign) GetAllUpcoming(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.RemapOrderBy(allowedCampaignColumns) + if queryArgs.OrderBy == "" { + queryArgs.OrderBy = "send_start_at" + queryArgs.Desc = false + } + // get all campaigns + campaigns, err := c.CampaignService.GetAllUpcoming( + g.Request.Context(), + session, + companyID, + &repository.CampaignOption{ + QueryArgs: queryArgs, + WithCompany: true, + WithCampaignTemplate: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaigns) +} + +// GetAllFinished gets all finished campaigns with pagination +// if no company id is given it gets all globals including company +func (c *Campaign) GetAllFinished(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.RemapOrderBy(allowedCampaignColumns) + if queryArgs.OrderBy == "" { + queryArgs.OrderBy = "send_start_at" + queryArgs.Desc = true + } + // get all campaigns + campaigns, err := c.CampaignService.GetAllFinished( + g.Request.Context(), + session, + companyID, + &repository.CampaignOption{ + QueryArgs: queryArgs, + WithCompany: true, + WithCampaignTemplate: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaigns) +} + +// GetEventsByCampaignID gets events by campaign id +func (c *Campaign) GetEventsByCampaignID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + // remap query args + queryArgs.RemapOrderBy(campaignEventColumns) + // set default sort order to desc + sortOrder := g.DefaultQuery("sortOrder", "desc") + if sortOrder == "desc" { + queryArgs.Desc = true + } + var since *time.Time + s, err := time.Parse(time.RFC3339Nano, g.Query("since")) + if err == nil { + since = &s + } + // get events by campaign id + events, err := c.CampaignService.GetEventsByCampaignID( + g.Request.Context(), + session, + id, + queryArgs, + since, + nil, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, events) +} + +// ExportEventsAsCSV exports a all campaign events as a CSV +func (c *Campaign) ExportEventsAsCSV(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByCreatedAt() + queryArgs.RemapOrderBy(campaignEventColumns) + sortOrder := g.DefaultQuery("sortOrder", "desc") + if sortOrder == "desc" { + queryArgs.Desc = true + } + // get all rows + 
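+	// a limit and offset of 0 appears to disable pagination here, so the CSV export covers every event for the campaign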
queryArgs.Limit = 0 + queryArgs.Offset = 0 + // get events by campaign id + events, err := c.CampaignService.GetEventsByCampaignID( + g.Request.Context(), + session, + id, + queryArgs, + nil, + nil, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + + buffer := &bytes.Buffer{} + writer := csv.NewWriter(buffer) + + headers := []string{ + "Created at", + "Recipient name", + "Recipient email", + "Event name", + "Event Details", + "User-Agent", + "IP", + } + err = writer.Write(headers) + if ok := c.handleErrors(g, err); !ok { + return + } + for _, event := range events.Rows { + row := []string{} + // if the recipient has been deleted or anonymized + if event.Recipient == nil { + row = []string{ + utils.CSVFromDate(event.CreatedAt), + "anonymized", + "anonymized", + utils.CSVRemoveFormulaStart(cache.EventNameByID[event.EventID.String()]), + utils.CSVRemoveFormulaStart(event.Data.String()), + utils.CSVRemoveFormulaStart(event.UserAgent.String()), + utils.CSVRemoveFormulaStart(event.IP.String()), + } + } else { + row = []string{ + utils.CSVFromDate(event.CreatedAt), + utils.CSVRemoveFormulaStart(event.Recipient.FirstName.MustGet().String()), + utils.CSVRemoveFormulaStart(event.Recipient.LastName.MustGet().String()), + utils.CSVRemoveFormulaStart(event.Recipient.Email.MustGet().String()), + utils.CSVRemoveFormulaStart(cache.EventNameByID[event.EventID.String()]), + utils.CSVRemoveFormulaStart(event.Data.String()), + utils.CSVRemoveFormulaStart(event.UserAgent.String()), + utils.CSVRemoveFormulaStart(event.IP.String()), + } + } + err = writer.Write(row) + if ok := c.handleErrors(g, err); !ok { + return + } + } + c.responseWithCSV(g, buffer, writer, "campaign_events.csv") +} + +// ExportSubmissionsAsCSV exports all campaign submissions as a CSV +func (c *Campaign) ExportSubmissionsAsCSV(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByCreatedAt() + queryArgs.RemapOrderBy(campaignEventColumns) + sortOrder := g.DefaultQuery("sortOrder", "desc") + if sortOrder == "desc" { + queryArgs.Desc = true + } + // get all rows + queryArgs.Limit = 0 + queryArgs.Offset = 0 + + // filter for submission events only + submissionEventID := cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA] + eventTypeFilter := []string{submissionEventID.String()} + + // get submission events by campaign id + events, err := c.CampaignService.GetEventsByCampaignID( + g.Request.Context(), + session, + id, + queryArgs, + nil, + eventTypeFilter, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + + buffer := &bytes.Buffer{} + writer := csv.NewWriter(buffer) + + headers := []string{ + "Submitted at", + "Recipient first name", + "Recipient last name", + "Recipient email", + "Submitted data", + "User-Agent", + "IP", + } + err = writer.Write(headers) + if ok := c.handleErrors(g, err); !ok { + return + } + for _, event := range events.Rows { + row := []string{} + // if the recipient has been deleted or anonymized + if event.Recipient == nil { + row = []string{ + utils.CSVFromDate(event.CreatedAt), + "anonymized", + "anonymized", + "anonymized", + utils.CSVRemoveFormulaStart(event.Data.String()), + utils.CSVRemoveFormulaStart(event.UserAgent.String()), + utils.CSVRemoveFormulaStart(event.IP.String()), + } + } else { + row = []string{ + utils.CSVFromDate(event.CreatedAt), + 
utils.CSVRemoveFormulaStart(event.Recipient.FirstName.MustGet().String()), + utils.CSVRemoveFormulaStart(event.Recipient.LastName.MustGet().String()), + utils.CSVRemoveFormulaStart(event.Recipient.Email.MustGet().String()), + utils.CSVRemoveFormulaStart(event.Data.String()), + utils.CSVRemoveFormulaStart(event.UserAgent.String()), + utils.CSVRemoveFormulaStart(event.IP.String()), + } + } + err = writer.Write(row) + if ok := c.handleErrors(g, err); !ok { + return + } + } + c.responseWithCSV(g, buffer, writer, "campaign_submissions.csv") +} + +func (c *Campaign) GetCampaignEmail(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get email + email, err := c.CampaignService.GetCampaignEmailBody( + g.Request.Context(), + session, + id, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, email) +} + +// GetCampaignURL gets a recipient landing page URL +func (c *Campaign) GetCampaignURL(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + url, err := c.CampaignService.GetLandingPageURLByCampaignRecipientID( + g.Request.Context(), + session, + id, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, url) +} + +// GetRecipientsByCampaignID gets recipients by campaign id +func (c *Campaign) GetRecipientsByCampaignID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // endpoints is handled a bit differently and allows to + // fetch an unlimited amount of rows if no offset and limit is set. + // TODO this endpoint should be changed to a Result so we fetch the rows as needed. 
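+	// when neither offset nor limit is supplied, pagination is disabled below and all campaign recipients are returned in a single response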
+ offset := g.DefaultQuery("offset", "") + limit := g.DefaultQuery("limit", "") + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + // special case to retrieve ALL rows + if offset == "" && limit == "" { + queryArgs.Offset = 0 + queryArgs.Limit = 0 + } + // remap query args + queryArgs.DefaultSortBy("created_at") + queryArgs.RemapOrderBy(allowedCampaignRecipientColumns) + // get recipients by campaign id + recipients, err := c.CampaignService.GetRecipientsByCampaignID( + g.Request.Context(), + session, + id, + &repository.CampaignRecipientOption{ + QueryArgs: queryArgs, + WithRecipient: true, + }, + ) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, recipients) +} + +// TrackingPixel returns a tracking pixel +func (c *Campaign) TrackingPixel(g *gin.Context) { + // get the campaign recipient id from the query + campaignRecipientID := g.Query("upn") // expect the campaign recipient id to be in here + if campaignRecipientID == "" { + c.Response.NotFound(g) + return + } + campaignRecipientUUID, err := uuid.Parse(campaignRecipientID) + if err != nil { + c.Logger.Debugw(errs.MsgFailedToParseRequest, + "error", err, + ) + c.Response.NotFound(g) + return + } + err = c.CampaignService.SaveTrackingPixelLoaded( + g, + &campaignRecipientUUID, + ) + if err != nil { + c.Logger.Debugw("failed to save tracking pixel loaded event", + "error", err, + ) + c.Response.NotFound(g) + return + } + g.Header("Content-Type", "image/gif") + if !build.Flags.Production { + g.File("./embedded/tracking-pixel/sendgrid/open.gif") + return + } + _, err = g.Writer.Write(embedded.TrackingPixel) + if err != nil { + c.Logger.Errorw("failed to write tracking pixel", "error", err) + } +} + +// UpdateByID updates a campaign by its id +func (c *Campaign) UpdateByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + + var req model.Campaign + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // update the campaign + err := c.CampaignService.UpdateByID(g.Request.Context(), session, id, &req) + if ok := c.handleErrors(g, err); !ok { + return + } + // handle responses + c.Response.OK(g, gin.H{}) +} + +// SetSentAtByCampaignRecipientID sets the sent at time for a campaign recipient +func (c *Campaign) SetSentAtByCampaignRecipientID(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // set sent at time + err := c.CampaignService.SetSentAtByCampaignRecipientID(g.Request.Context(), session, id) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// DeleteByID deletes a campaign by its id +func (c *Campaign) DeleteByID(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // delete + err := c.CampaignService.DeleteByID(g, session, id) + // handle responses + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// AnonymizeByID anonymizes a campaign by its id +func (c *Campaign) AnonymizeByID(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := 
c.handleParseIDParam(g) + if !ok { + return + } + // anonymize + err := c.CampaignService.AnonymizeByID(g, session, id) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// GetCampaignStats gets campaign statistics by campaign ID +func (c *Campaign) GetCampaignStats(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get stats + stats, err := c.CampaignService.GetCampaignStats(g.Request.Context(), session, id) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, stats) +} + +// GetAllCampaignStats gets all campaign statistics with pagination +func (c *Campaign) GetAllCampaignStats(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.RemapOrderBy(allowedCampaignColumns) + companyID := companyIDFromRequestQuery(g) + + // get stats + stats, err := c.CampaignService.GetAllCampaignStats(g.Request.Context(), session, queryArgs, companyID) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, stats) +} diff --git a/backend/controller/campaignTemplate.go b/backend/controller/campaignTemplate.go new file mode 100644 index 0000000..01adf7b --- /dev/null +++ b/backend/controller/campaignTemplate.go @@ -0,0 +1,199 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// CampaignTemplateColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var CampaignTemplateColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "updated_at"), + "name": repository.TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "name"), + "after_landing_page_redirect_url": repository.TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "after_landing_page_redirect_url"), + "is_complete": repository.TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "is_usable"), + "domain": repository.TableColumn(database.DOMAIN_TABLE, "name"), + "before_landing_page": repository.TableColumn("before_landing_page", "name"), + "landing_page": repository.TableColumn("landing_page", "name"), + "after_landing_page": repository.TableColumn("after_landing_page", "name"), + "smtp": repository.TableColumn(database.SMTP_CONFIGURATION_TABLE, "name"), + "api_sender": repository.TableColumn(database.API_SENDER_TABLE, "name"), + "email": repository.TableColumn(database.EMAIL_TABLE, "name"), +} + +// CampaignTemplate is a campaign template controller +type CampaignTemplate struct { + Common + CampaignTemplateService *service.CampaignTemplate +} + +// Create creates a campaign template +func (c *CampaignTemplate) Create(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var req model.CampaignTemplate + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // save + ctx := g.Request.Context() + id, err := c.CampaignTemplateService.Create(ctx, session, &req) + // handle response 
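+	// handleErrors presumably writes the matching error response and returns false when err is non-nil, so the handler only needs to return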
+ if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{ + "id": id.String(), + }, + ) +} + +// GetByID gets a campaign template by id +func (c *CampaignTemplate) GetByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // check if full data set should be loaded + options := &repository.CampaignTemplateOption{} + _, ok = g.GetQuery("full") + if ok { + options = &repository.CampaignTemplateOption{ + WithDomain: true, + WithSMTPConfiguration: true, + WithAPISender: true, + WithEmail: true, + WithLandingPage: true, + WithBeforeLandingPage: true, + WithAfterLandingPage: true, + WithIdentifier: true, + } + } + // get + ctx := g.Request.Context() + campaignTemplate, err := c.CampaignTemplateService.GetByID( + ctx, + session, + id, + options, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, campaignTemplate) +} + +// GetAll gets all campaign templates +func (c *CampaignTemplate) GetAll(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + pagination, ok := c.handlePagination(g) + if !ok { + return + } + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + usableOnlyQuery := g.Query("usableOnly") + usableOnly := false + if usableOnlyQuery == "true" { + usableOnly = true + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(CampaignTemplateColumnsMap) + columns := repository.SelectTable(database.CAMPAIGN_TEMPLATE_TABLE) + templates, err := c.CampaignTemplateService.GetAll( + g, + session, + companyID, + pagination, + &repository.CampaignTemplateOption{ + QueryArgs: queryArgs, + Columns: columns, + WithDomain: true, + WithSMTPConfiguration: true, + WithAPISender: true, + WithEmail: true, + WithLandingPage: true, + WithBeforeLandingPage: true, + WithAfterLandingPage: true, + UsableOnly: usableOnly, + }, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, templates) +} + +// UpdateByID updates a campaign template by id +func (c *CampaignTemplate) UpdateByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + var req model.CampaignTemplate + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // update + err := c.CampaignTemplateService.UpdateByID( + g.Request.Context(), + session, + id, + &req, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// DeleteByID deletes a campaign template by id +func (c *CampaignTemplate) DeleteByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // delete + err := c.CampaignTemplateService.DeleteByID(g, session, id) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/company.go b/backend/controller/company.go new file mode 100644 index 0000000..a30f38c --- /dev/null +++ b/backend/controller/company.go @@ -0,0 +1,594 @@ +package controller + +import ( + "archive/zip" + "bytes" + "encoding/csv" + "fmt" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/api" + "github.com/phishingclub/phishingclub/cache" + 
"github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" +) + +// DomainColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var CompanyColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.COMPANY_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.COMPANY_TABLE, "updated_at"), + "name": repository.TableColumn(database.COMPANY_TABLE, "name"), +} + +// Company is a Company controller +type Company struct { + Common + CompanyService *service.Company + CampaignService *service.Campaign + RecipientService *service.Recipient +} + +// GetByID gets a company by id +func (c *Company) GetByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + companyID, err := uuid.Parse(g.Param("id")) + if err != nil { + // ignore err as caused by bad user input + _ = err + c.Response.BadRequestMessage(g, api.InvalidCompanyID) + return + } + // get company + ctx := g.Request.Context() + company, err := c.CompanyService.GetByID( + ctx, + session, + &companyID, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, company) +} + +// ExportByCompanyID outputs a CSV with all events related to the recipient +func (c *Company) ExportByCompanyID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + companyID, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get the company exported + company, err := c.CompanyService.GetByID( + g, + session, + companyID, + ) + // create ZIP file in memory + zipBuffer := new(bytes.Buffer) + zipWriter := zip.NewWriter(zipBuffer) + zipFileName := fmt.Sprintf("company_export_%s.zip", company.Name.MustGet().String()) + + // add company data to zip + { + buffer := &bytes.Buffer{} + writer := csv.NewWriter(buffer) + headers := []string{ + "Created at", + "Updated at", + "Name", + } + err = writer.Write(headers) + if ok := c.handleErrors(g, err); !ok { + return + } + row := []string{ + utils.CSVFromDate(company.CreatedAt), + utils.CSVFromDate(company.UpdatedAt), + utils.CSVRemoveFormulaStart(utils.NullableToString(company.Name)), + } + err = writer.Write(row) + if ok := c.handleErrors(g, err); !ok { + return + } + writer.Flush() + // add to zip + f, err := zipWriter.Create("company.csv") + if ok := c.handleErrors(g, err); !ok { + return + } + _, err = f.Write(buffer.Bytes()) + if ok := c.handleErrors(g, err); !ok { + return + } + } + + // add recipients to zip + { + // get the recipients + recipients, err := c.RecipientService.GetByCompanyID( + g, + session, + companyID, + &repository.RecipientOption{ + WithCompany: true, + WithGroups: true, + }, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + // write a csv buffer with all recipient and their groups + buffer := &bytes.Buffer{} + writer := csv.NewWriter(buffer) + headers := []string{ + "Created at", + "Updated at", + "Email", + "Phone", + "Extra Identifier", + "Name", + "Position", + "Department", + "City", + "Country", + "Misc", + } + // find the recipient with the most groups and add that number of + // extra headers for groups + maxGroups := 0 + for _, recipient := range 
recipients.Rows { + groups, _ := recipient.Groups.Get() + if groupLen := len(groups); groupLen > maxGroups { + maxGroups = groupLen + } + } + for i := 1; i <= maxGroups; i++ { + headers = append(headers, fmt.Sprintf("Group %d", i)) + } + err = writer.Write(headers) + if ok := c.handleErrors(g, err); !ok { + return + } + for _, recipient := range recipients.Rows { + groups, _ := recipient.Groups.Get() + row := []string{ + utils.CSVFromDate(recipient.CreatedAt), + utils.CSVFromDate(recipient.UpdatedAt), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Email)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Phone)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.ExtraIdentifier)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.FirstName)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.LastName)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Position)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Department)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.City)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Country)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Misc)), + } + for _, group := range groups { + row = append(row, group.Name.MustGet().String()) + } + err = writer.Write(row) + if ok := c.handleErrors(g, err); !ok { + return + } + writer.Flush() + } + // add to zip + f, err := zipWriter.Create("recipients.csv") + if ok := c.handleErrors(g, err); !ok { + return + } + _, err = f.Write(buffer.Bytes()) + if ok := c.handleErrors(g, err); !ok { + return + } + } + // get all campaigns all recipient events + { + campaigns, err := c.CampaignService.GetByCompanyID( + g, + session, + companyID, + &repository.CampaignOption{}, + ) + for _, campaign := range campaigns.Rows { + headers := []string{ + "Campaign", + "Created at", + "Recipient name", + "Recipient email", + "Event name", + "Event Details", + "User-Agent", + "IP", + } + buffer := &bytes.Buffer{} + writer := csv.NewWriter(buffer) + err = writer.Write(headers) + if ok := c.handleErrors(g, err); !ok { + return + } + queryArgs := vo.QueryArgs{} + queryArgs.OrderBy = repository.TableColumn( + database.CAMPAIGN_EVENT_TABLE, + "created_at", + ) + sortOrder := g.DefaultQuery("sortOrder", "desc") + if sortOrder == "desc" { + queryArgs.Desc = true + } + // get all rows + queryArgs.Limit = 0 + queryArgs.Offset = 0 + // get events by campaign id + cid := campaign.ID.MustGet() + events, err := c.CampaignService.GetEventsByCampaignID( + g.Request.Context(), + session, + &cid, + &queryArgs, + nil, + nil, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + for _, event := range events.Rows { + firstName := "anonymized" + lastName := "anonymized" + recpEmail := "anonymized" + if event.Recipient != nil { + firstName = event.Recipient.FirstName.MustGet().String() + lastName = event.Recipient.LastName.MustGet().String() + recpEmail = event.Recipient.Email.MustGet().String() + } + row := []string{ + utils.CSVRemoveFormulaStart(campaign.Name.MustGet().String()), + utils.CSVFromDate(event.CreatedAt), + utils.CSVRemoveFormulaStart(firstName), + utils.CSVRemoveFormulaStart(lastName), + utils.CSVRemoveFormulaStart(recpEmail), + utils.CSVRemoveFormulaStart(cache.EventNameByID[event.EventID.String()]), + utils.CSVRemoveFormulaStart(event.Data.String()), + utils.CSVRemoveFormulaStart(event.UserAgent.String()), + utils.CSVRemoveFormulaStart(event.IP.String()), + } + err = writer.Write(row) + 
if ok := c.handleErrors(g, err); !ok { + return + } + } + // add a new subdirectory wit the event file in the zip + writer.Flush() + // add to zip + filename := fmt.Sprintf("campaign_events/%s.csv", campaign.Name.MustGet().String()) + f, err := zipWriter.Create(filename) + if ok := c.handleErrors(g, err); !ok { + return + } + _, err = f.Write(buffer.Bytes()) + if ok := c.handleErrors(g, err); !ok { + return + } + } + } + // close zip + err = zipWriter.Close() + if ok := c.handleErrors(g, err); !ok { + return + } + + c.responseWithZIP(g, zipBuffer, zipFileName) +} + +// ExportShared outputs a CSV with all shared recipients and events +func (c *Company) ExportShared(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // create ZIP file in memory + zipBuffer := new(bytes.Buffer) + zipWriter := zip.NewWriter(zipBuffer) + zipFileName := "shared_export_%s.zip" + // add recipients to zip + { + // get the recipients + recipients, err := c.RecipientService.GetByCompanyID( + g, + session, + nil, + &repository.RecipientOption{ + WithCompany: true, + WithGroups: true, + }, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + // write a csv buffer with all recipient and their groups + buffer := &bytes.Buffer{} + writer := csv.NewWriter(buffer) + headers := []string{ + "Created at", + "Updated at", + "Email", + "Phone", + "Extra Identifier", + "Name", + "Position", + "Department", + "City", + "Country", + "Misc", + } + // find the recipient with the most groups and add that number of + // extra headers for groups + maxGroups := 0 + for _, recipient := range recipients.Rows { + groups, _ := recipient.Groups.Get() + if groupLen := len(groups); groupLen > maxGroups { + maxGroups = groupLen + } + } + for i := 1; i <= maxGroups; i++ { + headers = append(headers, fmt.Sprintf("Group %d", i)) + } + err = writer.Write(headers) + if ok := c.handleErrors(g, err); !ok { + return + } + for _, recipient := range recipients.Rows { + groups, _ := recipient.Groups.Get() + row := []string{ + utils.CSVFromDate(recipient.CreatedAt), + utils.CSVFromDate(recipient.UpdatedAt), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Email)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Phone)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.ExtraIdentifier)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.FirstName)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.LastName)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Position)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Department)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.City)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Country)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recipient.Misc)), + } + for _, group := range groups { + row = append(row, group.Name.MustGet().String()) + } + err = writer.Write(row) + if ok := c.handleErrors(g, err); !ok { + return + } + writer.Flush() + } + // add to zip + f, err := zipWriter.Create("recipients.csv") + if ok := c.handleErrors(g, err); !ok { + return + } + _, err = f.Write(buffer.Bytes()) + if ok := c.handleErrors(g, err); !ok { + return + } + } + // get all campaigns all recipient events + { + campaigns, err := c.CampaignService.GetByCompanyID( + g, + session, + nil, + &repository.CampaignOption{}, + ) + for _, campaign := range campaigns.Rows { + headers := []string{ + "Campaign", + "Created at", + "Recipient name", + "Recipient 
email", + "Event name", + "Event Details", + "User-Agent", + "IP", + } + buffer := &bytes.Buffer{} + writer := csv.NewWriter(buffer) + err = writer.Write(headers) + if ok := c.handleErrors(g, err); !ok { + return + } + queryArgs := vo.QueryArgs{} + queryArgs.OrderBy = repository.TableColumn( + database.CAMPAIGN_EVENT_TABLE, + "created_at", + ) + sortOrder := g.DefaultQuery("sortOrder", "desc") + if sortOrder == "desc" { + queryArgs.Desc = true + } + // get all rows + queryArgs.Limit = 0 + queryArgs.Offset = 0 + // get events by campaign id + cid := campaign.ID.MustGet() + events, err := c.CampaignService.GetEventsByCampaignID( + g.Request.Context(), + session, + &cid, + &queryArgs, + nil, + nil, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + for _, event := range events.Rows { + firstName := "anonymized" + lastName := "anonymized" + recpEmail := "anonymized" + if event.Recipient != nil { + firstName = event.Recipient.FirstName.MustGet().String() + lastName = event.Recipient.LastName.MustGet().String() + recpEmail = event.Recipient.Email.MustGet().String() + } + row := []string{ + utils.CSVRemoveFormulaStart(campaign.Name.MustGet().String()), + utils.CSVFromDate(event.CreatedAt), + utils.CSVRemoveFormulaStart(firstName), + utils.CSVRemoveFormulaStart(lastName), + utils.CSVRemoveFormulaStart(recpEmail), + utils.CSVRemoveFormulaStart(cache.EventNameByID[event.EventID.String()]), + utils.CSVRemoveFormulaStart(event.Data.String()), + utils.CSVRemoveFormulaStart(event.UserAgent.String()), + utils.CSVRemoveFormulaStart(event.IP.String()), + } + err = writer.Write(row) + if ok := c.handleErrors(g, err); !ok { + return + } + } + // add a new subdirectory wit the event file in the zip + writer.Flush() + // add to zip + filename := fmt.Sprintf("campaign_events/%s.csv", campaign.Name.MustGet().String()) + f, err := zipWriter.Create(filename) + if ok := c.handleErrors(g, err); !ok { + return + } + _, err = f.Write(buffer.Bytes()) + if ok := c.handleErrors(g, err); !ok { + return + } + } + } + // close zip + err := zipWriter.Close() + if ok := c.handleErrors(g, err); !ok { + return + } + + c.responseWithZIP(g, zipBuffer, zipFileName) +} + +// ChangeName changes a company name +func (c *Company) ChangeName(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + var req model.Company + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // change company name + err := c.CompanyService.UpdateByID( + g, + session, + id, + &req, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, nil) +} + +// SoftDelete soft deletes a company +func (c *Company) DeleteByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // TODO company delete should FAIL if it has any relations to anything + // delete company + _, err := c.CompanyService.DeleteByID(g, session, id) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// Create creates a company +func (c *Company) Create(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req model.Company + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // save company + ctx := g.Request.Context() + company, err := c.CompanyService.Create( + ctx, + session, + &req, + ) + // handle response + 
if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{ + "id": company.ID, + }) +} + +// GetAll gets all companies with pagination +func (c *Company) GetAll(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(CompanyColumnsMap) + // get companies + ctx := g.Request.Context() + companies, err := c.CompanyService.GetAll( + ctx, + session, + queryArgs, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, companies) +} diff --git a/backend/controller/domain.go b/backend/controller/domain.go new file mode 100644 index 0000000..26a3028 --- /dev/null +++ b/backend/controller/domain.go @@ -0,0 +1,219 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/vo" +) + +// DomainColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var DomainColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.DOMAIN_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.DOMAIN_TABLE, "updated_at"), + "hosting_website": repository.TableColumn(database.DOMAIN_TABLE, "host_website"), + "redirects": repository.TableColumn(database.DOMAIN_TABLE, "redirect_url"), +} + +// Domain +type Domain struct { + Common + DomainService *service.Domain +} + +// Create creates a domain +func (d *Domain) Create(g *gin.Context) { + session, _, ok := d.handleSession(g) + if !ok { + return + } + // parse request + var req model.Domain + if ok := d.handleParseRequest(g, &req); !ok { + return + } + // save domain + id, err := d.DomainService.Create(g, session, &req) + // handle response + if ok := d.handleErrors(g, err); !ok { + return + } + d.Response.OK( + g, + gin.H{ + "id": id, + }, + ) +} + +// GetAll gets domains +func (d *Domain) GetAll(g *gin.Context) { + // handle session + session, _, ok := d.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := d.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(DomainColumnsMap) + // get domain + domains, err := d.DomainService.GetAll( + companyID, + g.Request.Context(), + session, + queryArgs, + true, // TODO there might not be any reason to retrieve the full relation here - optimize by removing it (false) + ) + if ok := d.handleErrors(g, err); !ok { + return + } + d.Response.OK(g, domains) +} + +// GetAllOverview gets domains with limited data +func (d *Domain) GetAllOverview(g *gin.Context) { + // handle session + session, _, ok := d.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := d.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(DomainColumnsMap) + // get domains + domains, err := d.DomainService.GetAllOverview( + companyID, + g.Request.Context(), + session, + queryArgs, + ) + if ok := d.handleErrors(g, err); !ok { + return + } + d.Response.OK(g, domains) +} + +// 
GetByID gets a domain by id +func (d *Domain) GetByID(g *gin.Context) { + // handle session + session, _, ok := d.handleSession(g) + if !ok { + return + } + // parse request + id, ok := d.handleParseIDParam(g) + if !ok { + return + } + // get domain + ctx := g.Request.Context() + domain, err := d.DomainService.GetByID( + ctx, + session, + id, + &repository.DomainOption{ + WithCompany: true, + }, + ) + if ok := d.handleErrors(g, err); !ok { + return + } + d.Response.OK(g, domain) +} + +// GetByName gets a domain by name +func (d *Domain) GetByName(g *gin.Context) { + // handle session + session, _, ok := d.handleSession(g) + if !ok { + return + } + // parse request + name, err := vo.NewString255(g.Param("domain")) + if ok := d.handleErrors(g, err); !ok { + return + } + // get domain + ctx := g.Request.Context() + domain, err := d.DomainService.GetByName( + ctx, + session, + name, + &repository.DomainOption{}, + ) + if ok := d.handleErrors(g, err); !ok { + return + } + d.Response.OK(g, domain) +} + +// UpdateByID updates a domain by id +func (d *Domain) UpdateByID(g *gin.Context) { + // handle session + session, _, ok := d.handleSession(g) + if !ok { + return + } + // parse request + id, ok := d.handleParseIDParam(g) + if !ok { + return + } + var req model.Domain + if ok := d.handleParseRequest(g, &req); !ok { + return + } + // update domain + err := d.DomainService.UpdateByID( + g, + session, + id, + &req, + ) + // handle response + if ok := d.handleErrors(g, err); !ok { + return + } + d.Response.OK(g, gin.H{}) +} + +// DeleteByID deletes a domain by id +func (d *Domain) DeleteByID(g *gin.Context) { + // handle session + session, _, ok := d.handleSession(g) + if !ok { + return + } + // parse request + id, ok := d.handleParseIDParam(g) + if !ok { + return + } + // delete domain + err := d.DomainService.DeleteByID( + g, + session, + id, + ) + // handle response + if ok := d.handleErrors(g, err); !ok { + return + } + d.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/email.go b/backend/controller/email.go new file mode 100644 index 0000000..ef76ecc --- /dev/null +++ b/backend/controller/email.go @@ -0,0 +1,375 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/vo" +) + +// EmailOrderByMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var EmailOrderByMap = map[string]string{ + "created_at": repository.TableColumn(database.EMAIL_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.EMAIL_TABLE, "created_at"), + "name": repository.TableColumn(database.EMAIL_TABLE, "name"), + "mail_from": repository.TableColumn(database.EMAIL_TABLE, "mail_from"), + "from": repository.TableColumn(database.EMAIL_TABLE, "from"), + "subject": repository.TableColumn(database.EMAIL_TABLE, "subject"), + "tracking_pixel": repository.TableColumn(database.EMAIL_TABLE, "add_tracking_pixel"), +} + +// AddAttachmentsToEmailRequest is a request to add attachments to a message +type AddAttachmentsToEmailRequest struct { + Attachments []string `json:"ids"` // attachment IDs +} + +// 
RemoveAttachmentFromEmailRequest is a request to remove an attachment from a message +type RemoveAttachmentFromEmailRequest struct { + AttachmentID string `json:"attachmentID"` +} + +// SendTestEmailRequest is a request for sending a test of an e-mail +type SendTestEmailRequest struct { + SMTPID *uuid.UUID + DomainID *uuid.UUID + RecipientID *uuid.UUID +} + +// Email is a Email controller +type Email struct { + Common + EmailService *service.Email + TemplateService *service.Template + EmailRepository *repository.Email +} + +// AddAttachments adds attachments to a email +func (m *Email) AddAttachments(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + var request AddAttachmentsToEmailRequest + if ok := m.handleParseRequest(g, &request); !ok { + return + } + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + if len(request.Attachments) == 0 { + m.Response.BadRequestMessage(g, "No attachments provided") + return + } + attachmentIDs := []*uuid.UUID{} + for _, idParam := range request.Attachments { + id, err := uuid.Parse(idParam) + if err != nil { + m.Logger.Debugw(errs.MsgFailedToParseUUID, + "error", err, + ) + m.Response.BadRequestMessage(g, "Invalid attachment ID") + return + } + attachmentIDs = append(attachmentIDs, &id) + } + // add attachments to email + err := m.EmailService.AddAttachments( + g.Request.Context(), + session, + id, + attachmentIDs, + ) + // handle responses + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, gin.H{}) +} + +// RemoveAttachment removes an attachment from a email +func (m *Email) RemoveAttachment(g *gin.Context) { + // handle session + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse req + var req RemoveAttachmentFromEmailRequest + if ok := m.handleParseRequest(g, &req); !ok { + return + } + attachmentID, err := uuid.Parse(req.AttachmentID) + if err != nil { + m.Logger.Debugw(errs.MsgFailedToParseUUID, + "error", err, + ) + m.Response.BadRequestMessage(g, "Invalid attachment ID") + return + } + emailID, err := uuid.Parse(g.Param("id")) + if err != nil { + m.Logger.Debugw(errs.MsgFailedToParseUUID, "error", err) + m.Response.BadRequestMessage(g, "Invalid message ID") + return + } + // remove attachment from email + err = m.EmailService.RemoveAttachment( + g.Request.Context(), + session, + &emailID, + &attachmentID, + ) + // handle responses + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, gin.H{}) +} + +// Create creates a email +func (m *Email) Create(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse req + var req model.Email + if ok := m.handleParseRequest(g, &req); !ok { + return + } + // save email + id, err := m.EmailService.Create( + g, + session, + &req, + ) + // handle responses + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, gin.H{ + "id": id, + }) +} + +// SendTestEmail +func (m *Email) SendTestEmail(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + var req SendTestEmailRequest + if ok := m.handleParseRequest(g, &req); !ok { + return + } + // send test email + err := m.EmailService.SendTestEmail( + g, + session, + id, + req.SMTPID, + req.DomainID, + req.RecipientID, + ) + // handle responses + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, gin.H{}) +} + +// GetByID gets a email by ID +func (m *Email) GetByID(g *gin.Context) 
{ + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + // get email + email, err := m.EmailService.GetByID( + g.Request.Context(), + session, + id, + ) + // handle responses + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, email) +} + +// GetContentByID gets a email content by ID +func (m *Email) GetContentByID(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + // get + email, err := m.EmailService.GetByID( + g.Request.Context(), + session, + id, + ) + if ok := m.handleErrors(g, err); !ok { + return + } + // build email + domain := &model.Domain{ + Name: nullable.NewNullableWithValue( + *vo.NewString255Must("example.test"), + ), + } + recipient := model.NewRecipientExample() + campaignRecipient := model.CampaignRecipient{ + ID: nullable.NewNullableWithValue( + uuid.New(), + ), + Recipient: recipient, + } + apiSender := model.NewAPISenderExample() + emailBody, err := m.TemplateService.CreateMailBody( + "id", + "/foo", + domain, + &campaignRecipient, + email, + apiSender, + ) + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, emailBody) +} + +// GetAll gets all emails using pagination +func (m *Email) GetAll(g *gin.Context) { + // handle session + session, _, ok := m.handleSession(g) + if !ok { + return + } + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := m.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByName() + queryArgs.RemapOrderBy(EmailOrderByMap) + emails, err := m.EmailService.GetAll( + g.Request.Context(), + session, + companyID, + queryArgs, + ) + // handle responses + if ok := m.handleErrors(g, err); !ok { + return + + } + m.Response.OK(g, emails) +} + +// GetOverviews gets all email overviews using pagination +func (m *Email) GetOverviews(g *gin.Context) { + // handle session + session, _, ok := m.handleSession(g) + if !ok { + return + } + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := m.handleQueryArgs(g) + if !ok { + return + } + queryArgs.RemapOrderBy(EmailOrderByMap) + queryArgs.DefaultSortByName() + emails, err := m.EmailService.GetOverviews( + g.Request.Context(), + session, + companyID, + queryArgs, + ) + // handle responses + if ok := m.handleErrors(g, err); !ok { + return + + } + m.Response.OK(g, emails) +} + +// UpdateByID updates a message by ID +func (m *Email) UpdateByID(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + var email model.Email + if ok := m.handleParseRequest(g, &email); !ok { + return + } + // update message + err := m.EmailService.UpdateByID( + g.Request.Context(), + session, + id, + &email, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, gin.H{}) +} + +// DeleteByID deletes a message by ID +func (m *Email) DeleteByID(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + // delete message + err := m.EmailService.DeleteByID( + g.Request.Context(), + session, + id, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/health.go b/backend/controller/health.go new file mode 100644 index 0000000..1fce4c6 --- 
/dev/null +++ b/backend/controller/health.go @@ -0,0 +1,15 @@ +package controller + +import ( + "net/http" + + "github.com/gin-gonic/gin" +) + +// Health is the Health controller +type Health struct{} + +// Health returns a 200 OK +func (c *Health) Health(g *gin.Context) { + g.Status(http.StatusOK) +} diff --git a/backend/controller/identifier.go b/backend/controller/identifier.go new file mode 100644 index 0000000..00428d3 --- /dev/null +++ b/backend/controller/identifier.go @@ -0,0 +1,51 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// IdentifierColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var IdentifierColumnsMap = map[string]string{ + "name": repository.TableColumn(database.IDENTIFIER_TABLE, "name"), +} + +type Identifier struct { + Common + IdentifierService *service.Identifier +} + +// GetAll gets all identifiers +func (c *Identifier) GetAll(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByName() + // get + identifiers, err := c.IdentifierService.GetAll( + g, + session, + &repository.IdentifierOption{ + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + identifiers, + ) +} diff --git a/backend/controller/import.go b/backend/controller/import.go new file mode 100644 index 0000000..b13e069 --- /dev/null +++ b/backend/controller/import.go @@ -0,0 +1,47 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/service" +) + +// Import handles import for templates like emails, landing pages and so on +type Import struct { + Common + ImportService *service.Import +} + +// Import imports a .zip file +func (im *Import) Import(g *gin.Context) { + session, _, ok := im.handleSession(g) + if !ok { + return + } + // parse request + f, err := g.FormFile("file") + // handle responses + if ok := im.handleErrors(g, err); !ok { + return + } + + // Read forCompany flag from form (treat "1" or "true" as true) + forCompany := false + if v := g.PostForm("forCompany"); v == "1" || v == "true" { + forCompany = true + } + + // Read companyID from form data if provided + var companyID *uuid.UUID + if companyIDStr := g.PostForm("companyID"); companyIDStr != "" { + if cid, err := uuid.Parse(companyIDStr); err == nil { + companyID = &cid + } + } + + summary, err := im.ImportService.Import(g, session, f, forCompany, companyID) + if ok := im.handleErrors(g, err); !ok { + return + } + im.Response.OK(g, summary) +} diff --git a/backend/controller/install.go b/backend/controller/install.go new file mode 100644 index 0000000..7cfa9b6 --- /dev/null +++ b/backend/controller/install.go @@ -0,0 +1,329 @@ +package controller + +import ( + "fmt" + + "github.com/go-errors/errors" + "github.com/google/uuid" + + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/cli" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/password" + 
"github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/vo" + "golang.org/x/net/context" + "gorm.io/gorm" +) + +// SetupAdminRequest is the request for the install action +type SetupAdminRequest struct { + Username string `json:"username" binding:"required"` + UserFullname string `json:"userFullname" binding:"required"` + NewPassword string `json:"newPassword" binding:"required"` +} + +// InitialSetup is a controller used by the CLI in the +// initial setup process - it is not an API controller +type InitialSetup struct { + Common + CLIOutputter cli.Outputter + OptionRepository *repository.Option + InstallService *service.InstallSetup + OptionService *service.Option +} + +// IsInstalled checks if the application is installed +// not as a +func (is *InitialSetup) IsInstalled(ctx context.Context) (bool, error) { + isInstalledOption, err := is.OptionRepository.GetByKey( + ctx, + data.OptionKeyIsInstalled, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return false, nil + } + if err != nil { + return false, fmt.Errorf("could not get '%s' option: %w", data.OptionKeyIsInstalled, err) + } + return isInstalledOption.Value.String() == data.OptionValueIsInstalled, nil +} + +// HandleInitialSetup handles the initial setup of the application +// this includes inserting the isInstalled option to not installed +// and making or updating the sacrificial admin account +func (is *InitialSetup) HandleInitialSetup(ctx context.Context) error { + // setup option for is installed + isInstalledOption, err := is.OptionRepository.GetByKey( + ctx, + data.OptionKeyIsInstalled, + ) + // if the option does not exist, create it + if err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return fmt.Errorf("%w: could not get '%s' option", err, data.OptionKeyIsInstalled) + } + key := vo.NewString64Must(data.OptionKeyIsInstalled) + value := vo.NewOptionalString1MBMust(data.OptionValueIsNotInstalled) + isInstalledOptionWithoutID := model.Option{ + Key: *key, + Value: *value, + } + _, err = is.OptionRepository.Insert( + ctx, + &isInstalledOptionWithoutID, + ) + if err != nil { + return fmt.Errorf("%w: could not insert entity for option '%s'", err, data.OptionKeyIsInstalled) + } + isInstalledOption, err = is.OptionRepository.GetByKey( + ctx, + isInstalledOptionWithoutID.Key.String(), + ) + if err != nil { + return fmt.Errorf("%w: could not get created '%s' option", err, data.OptionKeyIsInstalled) + } + } + // if no instance ID exists, add it + instanceIDOption, err := is.OptionRepository.GetByKey( + ctx, + data.OptionKeyInstanceID, + ) + // if the instance id option does not exist, create it + if err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return fmt.Errorf("%w: could not get '%s' option", err, data.OptionKeyInstanceID) + } + key := vo.NewString64Must(data.OptionKeyInstanceID) + instanceID := uuid.New() + value := vo.NewOptionalString1MBMust(instanceID.String()) + instanceIDOption = &model.Option{ + Key: *key, + Value: *value, + } + _, err = is.OptionRepository.Insert( + ctx, + instanceIDOption, + ) + if err != nil { + return fmt.Errorf("could not insert instance ID: %w", err) + } + } + + // if installation is already complete, return error + if isInstalledOption.Value.String() == data.OptionValueIsInstalled { + return errs.ErrAlreadyInstalled + } + // setup accounts + admin, password, err := is.InstallService.SetupAccounts(ctx) + if err != nil { + return fmt.Errorf("could not setup initial admin account: %w", err) 
+ } + is.CLIOutputter.PrintInitialAdminAccount( + admin.Username.MustGet().String(), + password.String(), + ) + + return nil +} + +// Install is the Install controller used by the API +type Install struct { + Common + UserRepository *repository.User + CompanyRepository *repository.Company + OptionRepository *repository.Option + DB *gorm.DB + PasswordHasher password.Argon2Hasher +} + +// Install completes the installation by setting the initial administrators and options +func (in *Install) Install(g *gin.Context) { + tx := in.DB.Begin() + defer func() { + if r := recover(); r != nil { + tx.Rollback() + } + }() + ok := in.install(g, tx) + if !ok { + if tx.Rollback().Error != nil { + in.Logger.Errorw("failed to install - could not rollback transaction", + "error", tx.Rollback().Error, + ) + } + return + } + result := tx.Commit() + if result.Error != nil { + in.Logger.Errorw("failed to install - could not commit transaction", + "error", result.Error, + ) + in.Response.ServerError(g) + return + } + // the admin user changed username and password + // however as the install process is a special case, we wont + // require re-authentication + in.Response.OK(g, gin.H{}) +} + +// Install completes the installation by setting the initial administrators +// username, password, email, name and company name +func (in *Install) install(g *gin.Context, tx *gorm.DB) bool { + // handle session + _, user, ok := in.handleSession(g) + if !ok { + return false + } + role := user.Role + if role == nil { + in.Logger.Error("failed to install - session contain no role") + in.Response.ServerError(g) + return false + } + if !role.IsSuperAdministrator() { + in.Logger.Info("failed to install - not super admin") + // TODO add audit log + in.Response.Forbidden(g) + return false + } + // defer rollback or commit tx + var request SetupAdminRequest + if err := g.ShouldBindJSON(&request); err != nil { + in.Logger.Debugw("failed to parse request", + "error", err, + ) + in.Response.BadRequest(g) + return false + } + ctx := g.Request.Context() + // check if already installed + isInstalled, err := in.OptionRepository.GetByKey(ctx, data.OptionKeyIsInstalled) + if err != nil { + in.Logger.Errorw("failed to install - could not get option", + "optionKey", data.OptionKeyIsInstalled, + "error", err, + ) + in.Response.ServerError(g) + return false + } + if isInstalled.Value.String() == data.OptionValueIsInstalled { + in.Logger.Info("failed to install - already installed") + in.Response.ServerErrorMessage( + g, + "Installation is already complete", + ) + return false + } + // update the username + newUsername, err := vo.NewUsername(request.Username) + if err != nil { + in.Logger.Infow("failed to install - invalid username", + "username", request.Username, + "error", err, + ) + in.Response.ValidationFailed(g, "Username", err) + return false + } + if newUsername.String() == user.Username.MustGet().String() { + in.Logger.Infow("failed to install - new username is the same as the current", + "username", newUsername.String(), + "error", err, + ) + in.Response.BadRequestMessage( + g, + "Username may not be the same as the current", + ) + return false + } + userID := user.ID.MustGet() + err = in.UserRepository.UpdateUsernameByIDWithTransaction( + ctx, + tx, + &userID, + newUsername, + ) + if err != nil { + in.Logger.Infow("failed to install - could not update username", + "username", newUsername.String(), + "error", err, + ) + in.Response.ServerError(g) + return false + } + // update the password + newPassword, err := 
vo.NewReasonableLengthPassword(request.NewPassword) + if err != nil { + in.Logger.Infow("failed to install - invalid password", + "error", err, + ) + in.Response.BadRequestMessage(g, "invalid password") + return false + } + hash, err := in.PasswordHasher.Hash(newPassword.String()) + if err != nil { + in.Logger.Errorw("failed to install - could not hash password", + "error", err, + ) + in.Response.ServerError(g) + return false + } + err = in.UserRepository.UpdatePasswordHashByIDWithTransaction( + ctx, + tx, + &userID, + hash, + ) + if err != nil { + in.Logger.Errorw("failed to install - could not update password", + "error", err, + ) + in.Response.ServerError(g) + return false + } + // update the name + newName, err := vo.NewUserFullname(request.UserFullname) + if err != nil { + in.Logger.Infow("failed to install - invalid name", + "error", err, + ) + in.Response.ValidationFailed(g, "Name", err) + return false + } + err = in.UserRepository.UpdateFullNameByIDWithTransaction( + ctx, + tx, + &userID, + newName, + ) + if err != nil { + in.Logger.Infow("failed to install - could not update name", + "error", err, + ) + in.Response.ServerError(g) + return false + } + // update installed option to installed + option := model.Option{ + Key: *vo.NewString64Must(data.OptionKeyIsInstalled), + Value: *vo.NewOptionalString1MBMust(data.OptionValueIsInstalled), + } + err = in.OptionRepository.UpdateByKeyWithTransaction( + ctx, + tx, + &option, + ) + if err != nil { + in.Logger.Errorw("failed to install - could not create install option", + "error", err, + ) + in.Response.ServerErrorMessage(g, "failed to create install option") + return false + + } + return true +} diff --git a/backend/controller/log.go b/backend/controller/log.go new file mode 100644 index 0000000..5eef99a --- /dev/null +++ b/backend/controller/log.go @@ -0,0 +1,214 @@ +package controller + +import ( + "context" + "time" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/vo" + "go.uber.org/zap" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +type SetLevelRequest struct { + Level string `json:"level"` + DBLevel string `json:"dbLevel"` +} + +type Log struct { + Common + OptionService *service.Option + Database *gorm.DB + LoggerAtom *zap.AtomicLevel +} + +// Panic is a test utility +func (c *Log) Panic(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + if session == nil { + if ok := c.handleErrors(g, errors.New("no session")); !ok { + return + } + } + c.Deeper() +} + +func (c *Log) Deeper() { + panic("panic test") +} + +// Slow is a test utility +func (c *Log) Slow(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + if session == nil { + if ok := c.handleErrors(g, errors.New("no session")); !ok { + return + } + } + c.Logger.Debugf("Slow request testing start") + time.Sleep(10 * time.Second) + c.Logger.Debugf("Slow request testing stop") + c.Response.OK(g, gin.H{}) +} + +// GetLevel gets the log level +func (c *Log) GetLevel(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // get the log levels + logLevelOption, err := c.OptionService.GetOption( + g, + session, + data.OptionKeyLogLevel, + ) + // handle errors + if ok := c.handleErrors(g, err); !ok { + return + } + dbLogLevelOption, err := 
c.OptionService.GetOption(g, session, data.OptionKeyDBLogLevel) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{ + "level": logLevelOption.Value, + "dbLevel": dbLogLevelOption.Value, + }) +} + +// SetLevel sets the log level +func (c *Log) SetLevel(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var request SetLevelRequest + if ok := c.handleParseRequest(g, &request); !ok { + return + } + if request.Level == "" && request.DBLevel == "" { + c.Response.BadRequestMessage(g, "level or dbLevel is required") + return + } + if request.DBLevel != "" { + switch request.DBLevel { + case "silent": + c.Database.Logger = c.Database.Logger.LogMode(logger.Silent) + case "info": + c.Database.Logger = c.Database.Logger.LogMode(logger.Info) + case "warn": + c.Database.Logger = c.Database.Logger.LogMode(logger.Warn) + case "error": + c.Database.Logger = c.Database.Logger.LogMode(logger.Error) + default: + c.Logger.Debugw("invalid db log level", + "level", request.DBLevel, + ) + c.Response.BadRequestMessage(g, "unknown DB log level") + return + } + // set db log level in database + dbLevel := vo.NewOptionalString1MBMust(request.DBLevel) + dbLogLevelOption := model.Option{ + Key: *vo.NewString64Must(data.OptionKeyDBLogLevel), + Value: *dbLevel, + } + err := c.persist( + g, + session, + &dbLogLevelOption, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + } + if request.Level != "" { + switch request.Level { + case "debug": + c.LoggerAtom.SetLevel(zap.DebugLevel) + case "info": + c.LoggerAtom.SetLevel(zap.InfoLevel) + case "warn": + c.LoggerAtom.SetLevel(zap.WarnLevel) + case "error": + c.LoggerAtom.SetLevel(zap.ErrorLevel) + default: + c.Logger.Debugw("invalid log level", + "level", request.Level, + ) + c.Response.BadRequestMessage(g, "Unknown log level") + return + } + + // set log level in in memory logger struct + logLevel := model.Option{ + Key: *vo.NewString64Must(data.OptionKeyLogLevel), + Value: *vo.NewOptionalString1MBMust(request.Level), + } + err := c.persist( + g, + session, + &logLevel, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + } + c.Response.OK(g, nil) +} + +// TestLog tests the log +// Sends a log message for each log level debug, info, warn, error +func (c *Log) TestLog(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // check permissions + isAuthorized, err := service.IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + handleServerError(g, c.Response, err) + return + } + if !isAuthorized { + // TODO audit log + c.Response.Unauthorized(g) + return + } + c.Logger.Debug("Log: DEBUG Test") + c.Logger.Info("Log: INFO Test") + c.Logger.Warn("Log: WARN Test") + c.Logger.Error("Log: ERROR Test") + c.Response.OK(g, nil) +} + +// persit saves the log level +// TODO this has become empty and superflous +func (c *Log) persist( + ctx context.Context, + session *model.Session, + logLevel *model.Option, +) error { + return c.OptionService.SetOptionByKey( + ctx, + session, + logLevel, + ) +} diff --git a/backend/controller/option.go b/backend/controller/option.go new file mode 100644 index 0000000..cdee625 --- /dev/null +++ b/backend/controller/option.go @@ -0,0 +1,67 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/model" + 
"github.com/phishingclub/phishingclub/service" +) + +// Option is a Option controller +type Option struct { + Common + OptionService *service.Option +} + +// Get a update option +func (c *Option) Get(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + key := g.Param("key") + if key == "" { + c.Response.BadRequestMessage(g, "option is required") + return + } + ctx := g.Request.Context() + option, err := c.OptionService.GetOption( + ctx, + session, + key, + ) + if ok := handleServerError(g, c.Response, err); !ok { + return + } + if key == data.OptionKeyAdminSSOLogin { + option, err = c.OptionService.MaskSSOSecret(option) + if ok := handleServerError(g, c.Response, err); !ok { + return + } + } + c.Response.OK(g, option) +} + +// Update sets a option +func (c *Option) Update(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req model.Option + if ok := c.handleParseRequest(g, &req); !ok { + return + } + err := c.OptionService.SetOptionByKey(g, session, &req) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{}, + ) +} diff --git a/backend/controller/page.go b/backend/controller/page.go new file mode 100644 index 0000000..9c757c5 --- /dev/null +++ b/backend/controller/page.go @@ -0,0 +1,230 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// PageColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var PageColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.PAGE_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.PAGE_TABLE, "updated_at"), + "name": repository.TableColumn(database.PAGE_TABLE, "name"), +} + +// Page is a Page controller +type Page struct { + Common + PageService *service.Page + TemplateService *service.Template +} + +// Create creates a page +func (p *Page) Create(g *gin.Context) { + // handle session + session, _, ok := p.handleSession(g) + if !ok { + return + } + // parse req + var req model.Page + if ok := p.handleParseRequest(g, &req); !ok { + return + } + // save page + id, err := p.PageService.Create( + g.Request.Context(), + session, + &req, + ) + // handle response + if ok := p.handleErrors(g, err); !ok { + return + } + p.Response.OK( + g, + gin.H{ + "id": id.String(), + }, + ) +} + +// GetContentByID serves a page by id +func (p *Page) GetContentByID(g *gin.Context) { + session, _, ok := p.handleSession(g) + if !ok { + return + } + // parse request + id, ok := p.handleParseIDParam(g) + if !ok { + return + } + // get page + page, err := p.PageService.GetByID( + g, + session, + id, + &repository.PageOption{}, + ) + // handle response + if ok := p.handleErrors(g, err); !ok { + return + } + content, err := page.Content.Get() + if ok := p.handleErrors(g, err); !ok { + return + } + // build response + phishingPage, err := p.TemplateService.ApplyPageMock(content.String()) + if ok := p.handleErrors(g, err); !ok { + return + } + p.Response.OK(g, phishingPage.String()) +} + +// GetAll gets pages using pagination +func (p *Page) GetAll(g *gin.Context) { + session, _, ok := p.handleSession(g) + 
if !ok { + return + } + // parse request + queryArgs, ok := p.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + companyID := companyIDFromRequestQuery(g) + // get pages + pages, err := p.PageService.GetAll( + g, + session, + companyID, + &repository.PageOption{ + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := p.handleErrors(g, err); !ok { + return + } + p.Response.OK(g, pages) +} + +// GetOverview gets pages overview using pagination +func (p *Page) GetOverview(g *gin.Context) { + session, _, ok := p.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := p.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + companyID := companyIDFromRequestQuery(g) + // get pages + pages, err := p.PageService.GetAll( + g, + session, + companyID, + &repository.PageOption{ + Fields: []string{"id", "created_at", "updated_at", "name", "company_id"}, + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := p.handleErrors(g, err); !ok { + return + } + p.Response.OK(g, pages) +} + +// GetByID gets a page by id +func (p *Page) GetByID(g *gin.Context) { + session, _, ok := p.handleSession(g) + if !ok { + return + } + // parse request + id, ok := p.handleParseIDParam(g) + if !ok { + return + } + // get page + page, err := p.PageService.GetByID( + g.Request.Context(), + session, + id, + // do I really need to preload this? + &repository.PageOption{ + WithCompany: true, + }, + ) + // handle response + if ok := p.handleErrors(g, err); !ok { + return + } + p.Response.OK(g, page) +} + +// UpdateByID updates a page by id +func (p *Page) UpdateByID(g *gin.Context) { + session, _, ok := p.handleSession(g) + if !ok { + return + } + // parse request + id, ok := p.handleParseIDParam(g) + if !ok { + return + } + var req model.Page + if ok := p.handleParseRequest(g, &req); !ok { + return + } + // update page + err := p.PageService.UpdateByID( + g.Request.Context(), + session, + id, + &req, + ) + // handle response + if ok := p.handleErrors(g, err); !ok { + return + } + p.Response.OK(g, gin.H{}) +} + +// DeleteByID deletes a page by id +func (p *Page) DeleteByID(g *gin.Context) { + session, _, ok := p.handleSession(g) + if !ok { + return + } + // parse request + id, ok := p.handleParseIDParam(g) + if !ok { + return + } + // delete page + err := p.PageService.DeleteByID( + g.Request.Context(), + session, + id, + ) + // handle response + if ok := p.handleErrors(g, err); !ok { + return + } + p.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/qr.go b/backend/controller/qr.go new file mode 100644 index 0000000..1fcd7f2 --- /dev/null +++ b/backend/controller/qr.go @@ -0,0 +1,92 @@ +package controller + +import ( + "image/png" + "net/http" + + "github.com/boombuler/barcode" + "github.com/boombuler/barcode/qr" + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/service" +) + +// QRCodeRequest is the request to generate a QR code from a TOTP URL +type QRCodeRequest struct { + URL string `json:"url"` + DotSize int `json:"dotSize"` +} + +// QRGenerator is the QR controller +type QRGenerator struct { + Common +} + +// QRGenerator creates a HTML QR code +// It is returned in an JSON response +func (q *QRGenerator) ToHTML(g *gin.Context) { + _, _, ok := q.handleSession(g) + if !ok { + return + } + // parse request + var req QRCodeRequest + if ok := q.handleParseRequest(g, &req); !ok { + return + } + // generate QR code + qrCodeBuf, err := service.GenerateQRCode(req.URL, req.DotSize) + if err != nil { + 
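+ // error details are only written to the debug log; the client receives a
+ // generic 500 so internals are not leaked in the API response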
q.Logger.Debugw("failed to genereate QR code", + "error", err, + ) + q.Response.ServerError(g) + return + } + q.Response.OK(g, qrCodeBuf) +} + +// ToTOTPURL generates a QR code from a TOTP URL +func (q *QRGenerator) ToTOTPURL(g *gin.Context) { + _, _, ok := q.handleSession(g) + if !ok { + return + } + // parse request + var req QRCodeRequest + if ok := q.handleParseRequest(g, &req); !ok { + return + } + // generate QR code + qrCode, err := qr.Encode( + req.URL, + qr.M, + qr.Auto, + ) + if err != nil { + q.Logger.Debugw("failed to generate QR code", + "error", err, + ) + q.Response.ServerError(g) + return + } + qrCode, err = barcode.Scale(qrCode, 200, 200) + if err != nil { + q.Logger.Debugw("failed to scale QR code", + "error", err, + ) + q.Response.ServerError(g) + return + } + // output QR code as png + g.Writer.Header().Set("Content-Type", "image/png") + err = png.Encode(g.Writer, qrCode) + if err == nil { + q.Logger.Debugw("failed to encode QR code", + "error", err, + ) + q.Response.ServerError(g) + return + } + // respond + g.Status(http.StatusOK) +} diff --git a/backend/controller/recipient.go b/backend/controller/recipient.go new file mode 100644 index 0000000..c78886d --- /dev/null +++ b/backend/controller/recipient.go @@ -0,0 +1,470 @@ +package controller + +import ( + "archive/zip" + "bytes" + "encoding/csv" + "fmt" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" +) + +// recipientColumnByMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var recipientColumnByMap = map[string]string{ + "created_at": repository.TableColumn(database.RECIPIENT_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.RECIPIENT_TABLE, "updated_at"), + "email": repository.TableColumn(database.RECIPIENT_TABLE, "email"), + "phone": repository.TableColumn(database.RECIPIENT_TABLE, "phone"), + "extra identifier": repository.TableColumn(database.RECIPIENT_TABLE, "extra_identifier"), + "first_name": repository.TableColumn(database.RECIPIENT_TABLE, "first_name"), + "last_name": repository.TableColumn(database.RECIPIENT_TABLE, "last_name"), + "position": repository.TableColumn(database.RECIPIENT_TABLE, "position"), + "department": repository.TableColumn(database.RECIPIENT_TABLE, "department"), + "city": repository.TableColumn(database.RECIPIENT_TABLE, "city"), + "country": repository.TableColumn(database.RECIPIENT_TABLE, "country"), + "misc": repository.TableColumn(database.RECIPIENT_TABLE, "misc"), + "repeat_offender": "is_repeat_offender", // Special case - don't use TableColumn +} + +var recipientCampaignEventColumnMap = utils.MergeStringMaps( + campaignEventColumns, + map[string]string{ + "event": repository.TableColumnName(database.EVENT_TABLE), + "created": repository.TableColumn(database.CAMPAIGN_EVENT_TABLE, "created_at"), + "campaign": repository.TableColumn(database.CAMPAIGN_TABLE, "name"), + }, +) + +// Recipient is a Recipient controller +type Recipient struct { + Common + RecipientService *service.Recipient +} + +// Create inserts a new recipient +func (r *Recipient) Create(g *gin.Context) { + 
session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + var req model.Recipient + if ok := r.handleParseRequest(g, &req); !ok { + return + } + // save recipient + id, err := r.RecipientService.Create( + g.Request.Context(), + session, + &req, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK( + g, + gin.H{ + "id": id.String(), + }, + ) +} + +// GetCampaignEvents gets all campaign events by recipient id and campaign id +// gets all events if campaign id is nil +func (r *Recipient) GetCampaignEvents(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + recipientID, ok := r.handleParseIDParam(g) + if !ok { + return + } + // optional param + var campaignID *uuid.UUID + cid, err := uuid.Parse(g.Query("campaignID")) + if err == nil { + campaignID = &cid + } + queryArgs, ok := r.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByCreatedAt() + // remap query args + queryArgs.RemapOrderBy(recipientCampaignEventColumnMap) + // get events + events, err := r.RecipientService.GetAllCampaignEvents( + g.Request.Context(), + session, + recipientID, + campaignID, + queryArgs, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, events) +} + +// Export outputs a zip with recipient, groups and all events related to the recipient +func (r *Recipient) Export(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + recipientID, ok := r.handleParseIDParam(g) + if !ok { + return + } + // get the recipient + recp, err := r.RecipientService.GetByID( + g, + session, + recipientID, + &repository.RecipientOption{ + WithCompany: true, + WithGroups: true, + }, + ) + if ok := r.handleErrors(g, err); !ok { + return + } + recipientBuffer := &bytes.Buffer{} + recipientWriter := csv.NewWriter(recipientBuffer) + recpHeaders := []string{ + "Created at", + "Updated at", + "Email", + "Phone", + "Extra Identifier", + "Name", + "Position", + "Department", + "City", + "Country", + "Misc", + } + groups, _ := recp.Groups.Get() + for i := range groups { + recpHeaders = append(recpHeaders, fmt.Sprintf("Group %d", i+1)) + } + err = recipientWriter.Write(recpHeaders) + if ok := r.handleErrors(g, err); !ok { + return + } + row := []string{ + utils.CSVFromDate(recp.CreatedAt), + utils.CSVFromDate(recp.UpdatedAt), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.Email)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.Phone)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.ExtraIdentifier)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.FirstName)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.LastName)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.Position)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.Department)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.City)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.Country)), + utils.CSVRemoveFormulaStart(utils.NullableToString(recp.Misc)), + } + for _, group := range groups { + row = append(row, group.Name.MustGet().String()) + } + err = recipientWriter.Write(row) + if ok := r.handleErrors(g, err); !ok { + return + } + recipientWriter.Flush() + + queryArgs, ok := r.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByCreatedAt() + // remap query args + queryArgs.RemapOrderBy(recipientCampaignEventColumnMap) + sortOrder := 
g.DefaultQuery("sortOrder", "desc") + if sortOrder == "desc" { + queryArgs.Desc = true + } + + // get all rows + queryArgs.Limit = 0 + queryArgs.Offset = 0 + // get events + events, err := r.RecipientService.GetAllCampaignEvents( + g.Request.Context(), + session, + recipientID, + nil, + queryArgs, + ) + // handle response + eventsBuffer := &bytes.Buffer{} + eventsWriter := csv.NewWriter(eventsBuffer) + + headers := []string{ + "Created at", + "Campaign", + "IP", + "User-Agent", + "Event Details", + "Event", + } + err = eventsWriter.Write(headers) + if ok := r.handleErrors(g, err); !ok { + return + } + for _, event := range events.Rows { + row := []string{} + row = []string{ + utils.CSVFromDate(event.CreatedAt), + utils.CSVRemoveFormulaStart(event.CampaignName), + utils.CSVRemoveFormulaStart(event.IP.String()), + utils.CSVRemoveFormulaStart(event.UserAgent.String()), + utils.CSVRemoveFormulaStart(event.Data.String()), + utils.CSVRemoveFormulaStart(cache.EventNameByID[event.EventID.String()]), + } + err = eventsWriter.Write(row) + if ok := r.handleErrors(g, err); !ok { + return + } + } + eventsWriter.Flush() + + // create ZIP file in memory + zipBuffer := new(bytes.Buffer) + zipWriter := zip.NewWriter(zipBuffer) + zipFileName := fmt.Sprintf("recipient_export_%s.zip", recp.Email.MustGet().String()) + + // add events to zip + { + f, err := zipWriter.Create("recipient.csv") + if ok := r.handleErrors(g, err); !ok { + return + } + _, err = f.Write(recipientBuffer.Bytes()) + if ok := r.handleErrors(g, err); !ok { + return + } + } + // add events to zip + { + f, err := zipWriter.Create("events.csv") + if ok := r.handleErrors(g, err); !ok { + return + } + _, err = f.Write(eventsBuffer.Bytes()) + if ok := r.handleErrors(g, err); !ok { + return + } + } + // close zip + err = zipWriter.Close() + if ok := r.handleErrors(g, err); !ok { + return + } + + r.responseWithZIP(g, zipBuffer, zipFileName) +} + +// GetRepeatOffenderCount gets the repeat offender count +func (r *Recipient) GetRepeatOffenderCount(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + + // parse request + companyID := companyIDFromRequestQuery(g) + + // get count + count, err := r.RecipientService.GetRepeatOffenderCount( + g.Request.Context(), + session, + companyID, + ) + if ok := r.handleErrors(g, err); !ok { + return + } + + r.Response.OK(g, count) +} + +// GetAll gets all recipients +func (r *Recipient) GetAll(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + companyID := companyIDFromRequestQuery(g) + queryArgs, ok := r.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortBy("first_name") + // remap query args + queryArgs.RemapOrderBy(recipientColumnByMap) + // get recipients + recipients, err := r.RecipientService.GetAll( + g.Request.Context(), + companyID, + session, + &repository.RecipientOption{ + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, recipients) +} + +// GetByID gets a recipient by id +func (r *Recipient) GetByID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + // get recipient + recipient, err := r.RecipientService.GetByID( + g.Request.Context(), + session, + id, + &repository.RecipientOption{ + WithCompany: true, + WithGroups: true, + }, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, 
recipient) +} + +// GetStatsByID gets a recipient campaign stats by id +func (r *Recipient) GetStatsByID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + // get recipient stats + stats, err := r.RecipientService.GetStatsByID( + g.Request.Context(), + session, + id, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, stats) +} + +// UpdateByID updates a recipient by id +func (r *Recipient) UpdateByID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + var req model.Recipient + if ok := r.handleParseRequest(g, &req); !ok { + return + } + err := r.RecipientService.UpdateByID( + g.Request.Context(), + session, + id, + &req, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, gin.H{}) +} + +// Import imports recipients +func (r *Recipient) Import(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + var req struct { + Recipients []*model.Recipient `json:"recipients"` + CompanyID *uuid.UUID `json:"companyID"` + IgnoreOverwriteEmptyFields nullable.Nullable[bool] `json:"ignoreOverwriteEmptyFields"` + } + if ok := r.handleParseRequest(g, &req); !ok { + return + } + // IgnoreOverwriteEmptyFields default value is true + if !req.IgnoreOverwriteEmptyFields.IsSpecified() || req.IgnoreOverwriteEmptyFields.IsNull() { + req.IgnoreOverwriteEmptyFields = nullable.NewNullableWithValue(true) + } + _, err := r.RecipientService.Import( + g, + session, + req.Recipients, + req.IgnoreOverwriteEmptyFields.MustGet(), + req.CompanyID, + ) + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, &gin.H{}) +} + +// DeleteByID deletes a recipient by id +func (r *Recipient) DeleteByID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + // delete recipient + err := r.RecipientService.DeleteByID(g, session, id) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/recipientGroup.go b/backend/controller/recipientGroup.go new file mode 100644 index 0000000..e007858 --- /dev/null +++ b/backend/controller/recipientGroup.go @@ -0,0 +1,351 @@ +package controller + +import ( + "fmt" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// RecipientGroupColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var RecipientGroupColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.RECIPIENT_GROUP_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.RECIPIENT_GROUP_TABLE, "updated_at"), + "name": repository.TableColumn(database.RECIPIENT_GROUP_TABLE, "name"), +} + +// AddRecipientRequest is a request to add recipients to a recipient group +type AddRecipientRequest struct { + RecipientIDs []string `json:"recipientIDs"` +} + +// 
RemoveRecipientRequest is a request to remove recipients from a recipient group +type RemoveRecipientRequest struct { + RecipientIDs []string `json:"recipientIDs"` +} + +// RecipientGroup is a recipient group controller +type RecipientGroup struct { + Common + RecipientGroupService *service.RecipientGroup +} + +// Create creates a new recipient group +func (r *RecipientGroup) Create(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + var req model.RecipientGroup + if ok := r.handleParseRequest(g, &req); !ok { + return + } + // save recipient group + recipientGroupID, err := r.RecipientGroupService.Create( + g.Request.Context(), + session, + &req, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK( + g, + &gin.H{ + "id": recipientGroupID.String(), + }, + ) +} + +// GetAll returns all recipient groups using pagination +func (r *RecipientGroup) GetAll(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := r.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByName() + queryArgs.RemapOrderBy(RecipientGroupColumnsMap) + companyContextID := companyIDFromRequestQuery(g) + + // get recipient groups + recipientGroups, err := r.RecipientGroupService.GetAll( + g, + session, + companyContextID, + &repository.RecipientGroupOption{ + QueryArgs: queryArgs, + WithCompany: true, + WithRecipientCount: true, + }, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, recipientGroups) +} + +// GetByID gets a recipient group by id +func (r *RecipientGroup) GetByID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + recipientGroup, err := r.RecipientGroupService.GetByID( + g.Request.Context(), + session, + id, + &repository.RecipientGroupOption{ + WithCompany: true, + }, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, recipientGroup) +} + +// GetRecipientsByGroupID gets recipients by recipient group id +func (r *RecipientGroup) GetRecipientsByGroupID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + queryArgs, ok := r.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortBy("email") + // remap query args + queryArgs.RemapOrderBy(recipientColumnByMap) + if !ok { + return + } + // get recipients + ctx := g.Request.Context() + recipients, err := r.RecipientGroupService.GetRecipientsByGroupID( + ctx, + session, + id, + &repository.RecipientOption{ + QueryArgs: queryArgs, + WithCompany: true, + }, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + + r.Response.OK(g, recipients) +} + +// UpdateByID updates a recipient group by id +// updates only the name and company relations +func (r *RecipientGroup) UpdateByID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + // parse request + var req model.RecipientGroup + if ok := r.handleParseRequest(g, &req); !ok { + return + } + // check if recipient group exists already exists + err := r.RecipientGroupService.UpdateByID( + g.Request.Context(), + session, + id, + &req, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + 
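+ // on success only an empty JSON object is returned; clients are assumed to
+ // rely on the HTTP status rather than the response body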
r.Response.OK(g, &gin.H{}) +} + +// Import imports recipients to a recipient group +func (r *RecipientGroup) Import(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse request + groupID, ok := r.handleParseIDParam(g) + if !ok { + return + } + var req struct { + Recipients []*model.Recipient `json:"recipients"` + CompanyID *uuid.UUID `json:"companyID"` + IgnoreOverwriteEmptyFields nullable.Nullable[bool] `json:"ignoreOverwriteEmptyFields"` + } + if ok := r.handleParseRequest(g, &req); !ok { + return + } + // IgnoreOverwriteEmptyFields default value is true + if !req.IgnoreOverwriteEmptyFields.IsSpecified() || req.IgnoreOverwriteEmptyFields.IsNull() { + req.IgnoreOverwriteEmptyFields = nullable.NewNullableWithValue(true) + } + + err := r.RecipientGroupService.Import( + g, + session, + req.Recipients, + req.IgnoreOverwriteEmptyFields.MustGet(), + groupID, + req.CompanyID, + ) + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, &gin.H{}) +} + +// AddRecipients adds recipients to a recipient group +func (r *RecipientGroup) AddRecipients(g *gin.Context) { + // handle session + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse group ID + groupID, ok := r.handleParseIDParam(g) + if !ok { + return + } + // parse request + var req AddRecipientRequest + if ok := r.handleParseRequest(g, &req); !ok { + return + } + // parse recipient ids + recipientIDs := []*uuid.UUID{} + for _, id := range req.RecipientIDs { + rid, err := uuid.Parse(id) + if err != nil { + r.Logger.Debugw("failed to add recipients to recipient group", + "error", fmt.Errorf("failed to parse recipient id: %w", err), + ) + r.Response.BadRequestMessage(g, "invalid recipient id") + return + } + recipientIDs = append(recipientIDs, &rid) + } + // add recipients + err := r.RecipientGroupService.AddRecipients( + g.Request.Context(), + session, + groupID, + recipientIDs, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, &gin.H{}) +} + +// RemoveRecipients removes a recipient from a recipient group +func (r *RecipientGroup) RemoveRecipients(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + // parse request + var req RemoveRecipientRequest + if ok := r.handleParseRequest(g, &req); !ok { + return + } + // parse recipient ids + recipientIDs := []*uuid.UUID{} + for _, id := range req.RecipientIDs { + rid, err := uuid.Parse(id) + if err != nil { + r.Logger.Debugw("failed to remove recipients from recipient group", + "error", fmt.Errorf("failed to parse recipient id: %w", err), + ) + r.Response.BadRequestMessage(g, "invalid recipient id") + return + } + recipientIDs = append(recipientIDs, &rid) + } + // remove recipients + err := r.RecipientGroupService.RemoveRecipients( + g.Request.Context(), + session, + id, + recipientIDs, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return + } + r.Response.OK(g, &gin.H{}) +} + +// DeleteByID deletes a recipient group by id +// deleting a group also deletes all recipients in that group +func (r *RecipientGroup) DeleteByID(g *gin.Context) { + session, _, ok := r.handleSession(g) + if !ok { + return + } + // parse id + id, ok := r.handleParseIDParam(g) + if !ok { + return + } + // delete recipient group + err := r.RecipientGroupService.DeleteByID( + g.Request.Context(), + session, + id, + ) + // handle response + if ok := r.handleErrors(g, err); !ok { + return 
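+		// handleErrors has already written the appropriate error response, so only bail out here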
+ } + r.Response.OK( + g, + &gin.H{}, + ) +} diff --git a/backend/controller/smtpConfiguration.go b/backend/controller/smtpConfiguration.go new file mode 100644 index 0000000..98aff69 --- /dev/null +++ b/backend/controller/smtpConfiguration.go @@ -0,0 +1,269 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/api" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/vo" +) + +// SMTPConfigurationColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var SMTPConfigurationColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.SMTP_CONFIGURATION_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.SMTP_CONFIGURATION_TABLE, "updated_at"), + "name": repository.TableColumn(database.SMTP_CONFIGURATION_TABLE, "name"), + "host": repository.TableColumn(database.SMTP_CONFIGURATION_TABLE, "host"), + "port": repository.TableColumn(database.SMTP_CONFIGURATION_TABLE, "port"), + "username": repository.TableColumn(database.SMTP_CONFIGURATION_TABLE, "username"), +} + +// SMTPConfiguration is a controller +type SMTPConfiguration struct { + Common + SMTPConfigurationService *service.SMTPConfiguration +} + +// Create creates a new SMTPConfiguration +func (c *SMTPConfiguration) Create(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var req model.SMTPConfiguration + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // save SMTP configuration + id, err := c.SMTPConfigurationService.Create(g, session, &req) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{ + "id": id.String(), + }, + ) +} + +// GetAll gets SMTP configurations +func (c *SMTPConfiguration) GetAll(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(SMTPConfigurationColumnsMap) + companyID := companyIDFromRequestQuery(g) + // get + smtpConfigs, err := c.SMTPConfigurationService.GetAll( + g.Request.Context(), + session, + companyID, + &repository.SMTPConfigurationOption{ + QueryArgs: queryArgs, + WithCompany: true, + WithHeaders: true, + }, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, smtpConfigs) +} + +// GetByID gets a SMTP configuration by an ID +func (c *SMTPConfiguration) GetByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get SMTP configuration + smtpConfig, err := c.SMTPConfigurationService.GetByID( + g.Request.Context(), + session, + id, + &repository.SMTPConfigurationOption{ + WithCompany: true, + WithHeaders: true, + }, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, smtpConfig) +} + +type SMTPConfigurationTestEmailRequest struct { + Email vo.Email `json:"email" binding:"required,email"` + MailFrom vo.Email `json:"mailFrom" 
binding:"required,mailFrom"` +} + +// TestEmail tests the connection to a SMTP configuration +func (c *SMTPConfiguration) TestEmail(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + var req SMTPConfigurationTestEmailRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // test dial + err := c.SMTPConfigurationService.SendTestEmail( + g, + session, + id, + &req.Email, + &req.MailFrom, + ) + // handle any error as a validation error + if err != nil { + err = errs.NewValidationError(err) + } + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// UpdateByID updates a SMTP configuration - but not the headers +func (c *SMTPConfiguration) UpdateByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + var req model.SMTPConfiguration + if ok := c.handleParseRequest(g, &req); !ok { + return + } + err := c.SMTPConfigurationService.UpdateByID( + g.Request.Context(), + session, + id, + &req, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// AddHeader adds a header to a SMTP configuration +func (c *SMTPConfiguration) AddHeader(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var req model.SMTPHeader + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // save header + smtpID, ok := c.handleParseIDParam(g) + if !ok { + return + } + createdID, err := c.SMTPConfigurationService.AddHeader( + g.Request.Context(), + session, + smtpID, + &req, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{ + "id": createdID.String(), + }) +} + +// RemoveHeader removes a header from a SMTP configuration +func (c *SMTPConfiguration) RemoveHeader(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + headerID, err := uuid.Parse(g.Param("headerID")) + if err != nil { + c.Logger.Debugw("invalid header id", + "headerID", g.Param("headerID"), + "error", err, + ) + c.Response.BadRequestMessage(g, api.InvalidSMTPConfigurationID) + return + } + // remove header + err = c.SMTPConfigurationService.RemoveHeader( + g.Request.Context(), + session, + id, + &headerID, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// DeleteByID deletes a SMTP configuration +func (c *SMTPConfiguration) DeleteByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // delete + err := c.SMTPConfigurationService.DeleteByID(g, session, id) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/sso.go b/backend/controller/sso.go new file mode 100644 index 0000000..273c71a --- /dev/null +++ b/backend/controller/sso.go @@ -0,0 +1,88 @@ +package controller + +import ( + "errors" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/service" +) + +// SSO the single sign 
on controller +type SSO struct { + Common + *service.SSO +} + +// Upsert upserts a SSO configuration +func (s *SSO) Upsert(g *gin.Context) { + session, _, ok := s.handleSession(g) + if !ok { + return + } + // parse request + var request model.SSOOption + if ok := s.handleParseRequest(g, &request); !ok { + return + } + // handle upsert + err := s.SSO.Upsert( + g.Request.Context(), + session, + &request, + ) + // handle responses + if ok := s.handleErrors(g, err); !ok { + return + } + s.Response.OK(g, gin.H{}) +} + +func (s *SSO) IsEnabled(g *gin.Context) { + // if no sso client is setup, then it is not enabled + if s.SSO.MSALClient == nil { + s.Response.OK(g, false) + return + } + s.Response.OK(g, true) +} + +func (s *SSO) EntreIDLogin(g *gin.Context) { + authURL, err := s.SSO.EntreIDLogin(g) + if errors.Is(err, errs.ErrSSODisabled) { + s.Response.BadRequest(g) + return + } + if ok := s.handleErrors(g, err); !ok { + s.Response.BadRequest(g) + return + } + g.Redirect(http.StatusTemporaryRedirect, authURL) +} + +func (s *SSO) EntreIDCallBack(g *gin.Context) { + code := g.Query("code") + session, err := s.SSO.HandlEntraIDCallback(g, code) + if err != nil { + g.Redirect(http.StatusTemporaryRedirect, "/login?ssoAuthError=1") + return + } + if ok := s.handleErrors(g, err); !ok { + return + } + // Set the session in the cookie + cookie := &http.Cookie{ + Name: data.SessionCookieKey, + Value: session.ID.String(), + Path: "/", + SameSite: http.SameSiteStrictMode, + HttpOnly: true, + Secure: true, + Expires: *session.MaxAgeAt, + } + http.SetCookie(g.Writer, cookie) + g.Redirect(http.StatusTemporaryRedirect, "/dashboard") +} diff --git a/backend/controller/update.go b/backend/controller/update.go new file mode 100644 index 0000000..78d9b26 --- /dev/null +++ b/backend/controller/update.go @@ -0,0 +1,93 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/go-errors/errors" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/service" +) + +type Update struct { + Common + UpdateService *service.Update + OptionService *service.Option +} + +// CheckForUpdateCached checks if there is a new update from cache +func (u *Update) CheckForUpdateCached(g *gin.Context) { + session, _, ok := u.handleSession(g) + if !ok { + return + } + updateAvailable, usingSystemd, err := u.UpdateService.CheckForUpdateCached(g, session) + if ok := u.handleErrors(g, err); !ok { + return + } + u.Response.OK(g, gin.H{ + "updateAvailable": updateAvailable, + "updateInApp": usingSystemd, + }) +} + +// CheckForUpdate checks if there is a new update +func (u *Update) CheckForUpdate(g *gin.Context) { + session, _, ok := u.handleSession(g) + if !ok { + return + } + updateAvailable, usingSystemd, err := u.UpdateService.CheckForUpdate(g, session) + if ok := u.handleErrors(g, err); !ok { + return + } + u.Response.OK(g, gin.H{ + "updateAvailable": updateAvailable, + "updateInApp": usingSystemd, + }) +} + +// GetUpdateDetails gets details about the newest software update +func (u *Update) GetUpdateDetails(g *gin.Context) { + session, _, ok := u.handleSession(g) + if !ok { + return + } + opt, err := u.OptionService.GetOption(g, session, data.OptionKeyUsingSystemd) + if ok := u.handleErrors(g, err); !ok { + return + } + details, err := u.UpdateService.GetUpdateDetails(g, session) + if err != nil && !errors.Is(err, errs.ErrNoUpdateAvailable) { + if ok := u.handleErrors(g, err); !ok { + return + } + } + if errors.Is(err, errs.ErrNoUpdateAvailable) 
{ + u.Response.OK(g, gin.H{ + "updateAvailable": false, + "updateInApp": opt.Value.String() == data.OptionValueUsingSystemdYes, + "downloadURL": "", + "latestVersion": "", + }) + return + } + u.Response.OK(g, gin.H{ + "updateAvailable": true, + "updateInApp": opt.Value.String() == data.OptionValueUsingSystemdYes, + "downloadURL": details.DownloadURL, + "latestVersion": details.LatestVersion, + }) +} + +// RunUpdate performs an update +func (u *Update) RunUpdate(g *gin.Context) { + session, _, ok := u.handleSession(g) + if !ok { + return + } + err := u.UpdateService.RunUpdate(g, session) + if ok := u.handleErrors(g, err); !ok { + return + } + u.Response.OK(g, gin.H{}) +} diff --git a/backend/controller/user.go b/backend/controller/user.go new file mode 100644 index 0000000..f0e2d8f --- /dev/null +++ b/backend/controller/user.go @@ -0,0 +1,922 @@ +package controller + +import ( + "net/http" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var SessionColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.SESSION_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.SESSION_TABLE, "updated_at"), + "ip_address": repository.TableColumn(database.SESSION_TABLE, "ip_address"), +} + +var UserColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.USER_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.USER_TABLE, "updated_at"), + "name": repository.TableColumn(database.USER_TABLE, "name"), + "username": repository.TableColumn(database.USER_TABLE, "username"), + "email": repository.TableColumn(database.USER_TABLE, "email"), +} + +// UserLoginRequest is a request for login with username and password +type UserLoginRequest struct { + Username string `json:"username"` + Password string `json:"password"` + TOTP string `json:"totp"` + MFARecoveryCode string `json:"recoveryCode"` +} + +// UserSetupTOTPRequest is a request for setting up TOTP +type UserSetupTOTPRequest struct { + Password string `json:"password"` +} + +// UserSetupDisableTOTPRequest is a request for disabling TOTP +type UserDisableTOTPRequest struct { + Token string `json:"token"` +} + +// UserVerifyTOTPRequest is a request for verifying TOTP +type UserVerifyTOTPRequest struct { + TOTP string `json:"token"` +} + +// UserLoginWithMFARecoveryCodeRequest is a request for login with MFA recovery code +type UserLoginWithMFARecoveryCodeRequest struct { + RecoveryCode string `json:"recoveryCode"` + Username string `json:"username"` + Password string `json:"password"` +} + +// User is the change email controller +type User struct { + Common + UserService *service.User +} + +// Create creates a new user +func (c *User) Create(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req model.UserUpsertRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // create user + newUserID, err := c.UserService.Create( + g, + session, + &req, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{ + "id": newUserID.String(), + }, + ) +} + +// GetMaskedAPIKey gets logged-in users masked API 
key +func (c *User) GetMaskedAPIKey(g *gin.Context) { + session, user, ok := c.handleSession(g) + if !ok { + return + } + if user == nil { + c.handleErrors(g, errors.New("no user in session")) + } + // get + cid := user.ID.MustGet() + apiKey, err := c.UserService.GetMaskedAPIKey( + g, + session, + &cid, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{ + "apiKey": apiKey, + }, + ) +} + +// UpsertAPIKey create/update API key +func (c *User) UpsertAPIKey(g *gin.Context) { + session, user, ok := c.handleSession(g) + if !ok { + return + } + if user == nil { + c.handleErrors(g, errors.New("no user in session")) + } + // create user + uid := user.ID.MustGet() + apiKey, err := c.UserService.UpsertAPIKey( + g, + session, + &uid, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{ + "apiKey": apiKey, + }, + ) +} + +// RemoveAPIKey removes a api key +func (c *User) RemoveAPIKey(g *gin.Context) { + session, user, ok := c.handleSession(g) + if !ok { + return + } + if user == nil { + c.handleErrors(g, errors.New("no user in session")) + } + // create user + uid := user.ID.MustGet() + err := c.UserService.RemoveAPIKey( + g, + session, + &uid, + ) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{}, + ) +} + +// UpdateByID updates a user by ID +func (c *User) UpdateByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + var req model.User + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // update user + err := c.UserService.Update( + g, + session, + id, + &req, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// Delete deletes a user +func (c *User) Delete(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // delete user + err := c.UserService.Delete(g, session, id) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// GetAll gets all users using pagination +func (c *User) GetAll(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(UserColumnsMap) + // get user + users, err := c.UserService.GetAll(g, session, &repository.UserOption{ + QueryArgs: queryArgs, + WithRole: true, + WithCompany: true, + }) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, users) +} + +// GetByID gets a user by ID +func (c *User) GetByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + // get user + user, err := c.UserService.GetByID(g, session, id) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, user) +} + +// ChangeEmailOnLoggedInUser changes email on logged in user +// this is an administrator action +func (c *User) ChangeEmailOnLoggedInUser(g *gin.Context) { + session, sessionUser, ok := c.handleSession(g) + if !ok { + return + } + // parse and validate request + var request model.UserChangeEmailRequest + if ok := c.handleParseRequest(g, &request); !ok { + return + } + // change email + userID := 
sessionUser.ID.MustGet() + changedEmail, err := c.UserService.ChangeEmailAsAdministrator( + g, + session, + &userID, + &request.Email, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + gin.H{"email": changedEmail.String()}, + ) +} + +// ChangeFullnameOnLoggedInUser is the handler for change fullname +func (c *User) ChangeFullnameOnLoggedInUser(g *gin.Context) { + session, sessionUser, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req model.UserChangeFullnameRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // change fullname + userID := sessionUser.ID.MustGet() + _, err := c.UserService.ChangeFullname( + g, + session, + &userID, + &req.NewFullname, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// ChangePasswordOnLoggedInUser changes the password on the logged in user +func (c *User) ChangePasswordOnLoggedInUser(g *gin.Context) { + session, sessionUser, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req model.UserChangePasswordRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + // change password + err := c.UserService.ChangePassword( + g, + session, + &req.CurrentPassword, + &req.NewPassword, + ) + // handle response + if errors.Is(err, errs.ErrUserWrongPasword) { + c.Response.BadRequestMessage(g, "Invalid current password") + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + // invalidate all currently running sessions + userID := sessionUser.ID.MustGet() + err = c.SessionService.ExpireAllByUserID(g, session, &userID) + // partial error, the password is changed but the sessions are not invalidated + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + "password changed - all sessions have been invalidated", + ) +} + +// ChangeUsernameOnLoggedInUser changes the username +func (c *User) ChangeUsernameOnLoggedInUser(g *gin.Context) { + session, sessionUser, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req model.UserChangeUsernameOnLoggedInRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + userID := sessionUser.ID.MustGet() + // change username + err := c.UserService.ChangeUsername( + g.Request.Context(), + session, + &userID, + &req.NewUsername, + ) + // handle error + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// ExpireSessionByID expires a session by ID +// a administrator can expire any session +// a user can expire their own sessions +func (c *User) ExpireSessionByID(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + id, ok := c.handleParseIDParam(g) + if !ok { + return + } + isAuthorized, err := service.IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if ok := c.handleErrors(g, err); !ok { + return + } + if !isAuthorized { + c.Response.Forbidden(g) + return + } + err = c.SessionService.Expire(g, id) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + "session expired", + ) +} + +// GetSessionsByUserID gets all sessions by user ID +func (c *User) GetSessionsOnLoggedInUser(g *gin.Context) { + session, sessionUser, ok := c.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := c.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + queryArgs.RemapOrderBy(SessionColumnsMap) + userID := sessionUser.ID.MustGet() + sessions, err := 
c.SessionService.GetSessionsByUserID( + g, + session, + &userID, + &repository.SessionOption{ + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + data := []map[string]interface{}{} + for _, sess := range sessions.Rows { + idStr := sess.ID.String() + data = append(data, map[string]interface{}{ + "id": idStr, + "current": idStr == session.ID.String(), + "ip": sess.IP, + "createdAt": sess.CreatedAt, + "updatedAt": sess.UpdatedAt, + }) + } + c.Response.OK( + g, + gin.H{"sessions": data}, + ) +} + +// Login logs in a user +func (c *User) Login(g *gin.Context) { + // parse req + var req UserLoginRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + user, err := c.UserService.AuthenticateUsernameWithPassword( + g, + req.Username, + req.Password, + g.ClientIP(), + ) + if errors.Is(err, errs.ErrUserWrongPasword) { + c.Response.BadRequestMessage(g, "Invalid password") + return + } + if errors.Is(err, gorm.ErrRecordNotFound) { + c.Response.BadRequestMessage(g, "Invalid credentials") + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + // if the user has MFA enabled then we check the MFA flow + // if the user has MFA enabled, we must check if there is a + // valid MFA or a valid recovery code + userID := user.ID.MustGet() + MFATokenSupplied := len(req.TOTP) > 0 + MFARecoveryCodeSupplied := len(req.MFARecoveryCode) > 0 + mfaEnabled, err := c.UserService.IsTOTPEnabledByUserID( + g, + &userID, + ) + if errors.Is(err, errs.ErrUserWrongTOTP) { + c.Response.BadRequestMessage(g, "Invalid TOTP") + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + if mfaEnabled { + // if tokens or recovery codes are supplied + // return mfa is required + if !MFATokenSupplied && !MFARecoveryCodeSupplied { + c.Response.OK( + g, + gin.H{ + "mfa": true, + }, + ) + return + } + // if the client has given both a TOTP and a recovery code + // we return a bad request + if MFATokenSupplied && MFARecoveryCodeSupplied { + c.Response.BadRequestMessage(g, "Cannot supply both MFA token and MFA recovery code") + return + } + // verify the TOTP MFA token + userID := user.ID.MustGet() + if MFATokenSupplied && !MFARecoveryCodeSupplied { + // if MFA is enabled, verify the TOTP + totpToken, err := vo.NewString64(req.TOTP) + if err != nil { + c.Logger.Debugw("failed to create TOTP", + "error", err, + ) + c.Response.ValidationFailed(g, "TOTP", err) + return + } + err = c.UserService.CheckTOTP( + g, + &userID, + totpToken, + ) + if err != nil { + if errors.Is(err, errs.ErrUserWrongTOTP) { + c.Response.BadRequestMessage(g, "Invalid TOTP") + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + } + } + // if the user has MFA enabled and the client has supplied a recovery code + // we verify the recovery code + if !MFATokenSupplied && MFARecoveryCodeSupplied { + recoveryCode, err := vo.NewString64(req.MFARecoveryCode) + if err != nil { + c.Logger.Debugw("failed to create recovery code", + "error", err, + ) + c.Response.ValidationFailed(g, "RecoveryCode", err) + return + } + verifiedMFA, err := c.UserService.CheckMFARecoveryCode( + g, + &userID, + recoveryCode, + ) + if err != nil { + if errors.Is(err, errs.ErrUserWrongRecoveryCode) { + c.Response.BadRequestMessage(g, "Invalid recovery code") + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + } + if !verifiedMFA { + c.Response.BadRequestMessage(g, "Invalid recovery code") + return + } + // as the recovery code is valid, we can now disable MFA + err = 
c.UserService.DisableTOTP(g, &userID) + if ok := c.handleErrors(g, err); !ok { + return + } + } + } + // create a new session + session, err := c.SessionService.Create( + g, + user, + g.ClientIP(), + ) + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + // Set the session in the cookie + cookie := &http.Cookie{ + Name: data.SessionCookieKey, + Value: session.ID.String(), + Path: "/", + SameSite: http.SameSiteStrictMode, + HttpOnly: true, + Secure: true, + Expires: *session.MaxAgeAt, + } + http.SetCookie(g.Writer, cookie) + c.Response.OK(g, session) +} + +// expireCookieAndStatusOK expires the cookie and returns a 200 OK +func (c *User) expireCookieAndStatusOK(g *gin.Context) { + g.SetCookie( + data.SessionCookieKey, + "", + -1, + "/", + "", + false, + true, + ) + c.logoutOK(g) +} + +// logoutOK returns a 200 OK +func (c *User) logoutOK(g *gin.Context) { + c.Response.OK( + g, + gin.H{"message": "logged out"}, + ) +} + +// Logout logs out the user +// only invalidates the session if the session cookie is +// in the request, this should reduce the risk of CSRF logout +func (c *User) Logout(g *gin.Context) { + sessionCookie, err := g.Cookie(data.SessionCookieKey) + if err != nil { + c.logoutOK(g) + return + } + sessionID, err := uuid.Parse(sessionCookie) + if err != nil { + c.logoutOK(g) + return + } + ctx := g.Request.Context() + err = c.SessionService.Expire(ctx, &sessionID) + if err != nil { + c.expireCookieAndStatusOK(g) + return + } + c.expireCookieAndStatusOK(g) +} + +// SessionPing pings the session +func (c *User) SessionPing(g *gin.Context) { + // handle session + session, sessionUser, ok := c.handleSession(g) + if !ok { + return + } + c.Logger.Debugw("pinged session for user", + "userID", sessionUser.ID.MustGet().String(), + ) + sessionRole := sessionUser.Role + if sessionRole == nil { + c.Logger.Error("failed to load role from session user") + c.Response.ServerError(g) + return + } + sessionCompany := sessionUser.Company + companyName := "" + if sessionCompany != nil { + companyName = sessionCompany.Name.MustGet().String() + } + c.Response.OK( + g, + gin.H{ + "userID": sessionUser.ID, + "username": sessionUser.Username.MustGet().String(), + "name": sessionUser.Name.MustGet().String(), + "role": sessionRole.Name, + "company": companyName, + "ip": session.IP, + }, + ) +} + +// InvalidateAllSessionByUserID is the nuclear session button for a user +func (c *User) InvalidateAllSessionByUserID(g *gin.Context) { + session, user, ok := c.handleSession(g) + if !ok { + return + } + var userID *uuid.UUID + // parse req + var req model.InvalidateAllSessionRequest + err := g.ShouldBindJSON(&req) + if err != nil { + if user == nil || !user.ID.IsSpecified() { + c.Response.BadRequest(g) + return + } + uid := user.ID.MustGet() + userID = &uid + } else { + if req.UserID == nil { + c.Response.BadRequest(g) + return + } + userID = req.UserID + } + // invalidate + err = c.SessionService.ExpireAllByUserID(g, session, userID) + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK(g, gin.H{}) +} + +// SetupTOTP generates a new TOTP MFA secrets +func (c *User) SetupTOTP(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var request UserSetupTOTPRequest + if ok := c.handleParseRequest(g, &request); !ok { + return + } + passwd, err := vo.NewReasonableLengthPassword(request.Password) + if err != nil { + c.Logger.Debugw("failed to create password", + "error", err, + ) + c.Response.ValidationFailed(g, "Password", err) + 
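+		// the supplied password failed value-object validation; a 400 response was written above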
return + } + // get and save TOTP for user + totpValues, err := c.UserService.SetupTOTP( + g.Request.Context(), + session, + passwd, + ) + // handle response + if errors.Is(err, errs.ErrAuthenticationFailed) { + c.Response.BadRequestMessage(g, "Incorrect password") + return + } + if ok := handleServerError(g, c.Response, err); !ok { + return + } + c.Response.OK( + g, + gin.H{ + "base32": totpValues.Secret, + "url": totpValues.URL, + "recoveryCode": totpValues.RecoveryCode, + }, + ) +} + +// SetupVerifyTOTP verifies a TOTP +func (c *User) SetupVerifyTOTP(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req UserVerifyTOTPRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + totp, err := vo.NewString64(req.TOTP) + if err != nil { + c.Logger.Debugw("failed to create TOTP", + "error", err, + ) + c.Response.ValidationFailed(g, "TOTP", err) + return + } + // verify TOTP + err = c.UserService.SetupCheckTOTP( + g.Request.Context(), + session, + totp, + ) + if errors.Is(err, errs.ErrUserWrongTOTP) { + c.Response.BadRequestMessage(g, "Invalid token") + return + } + + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + "TOTP verified", + ) +} + +// IsTOTPEnabled checks if TOTP is enabled +func (c *User) IsTOTPEnabled(g *gin.Context) { + session, _, ok := c.handleSession(g) + if !ok { + return + } + // check if TOTP is enabled + isEnabled, err := c.UserService.IsTOTPEnabled( + g.Request.Context(), + session, + ) + // handle response + if ok := handleServerError(g, c.Response, err); !ok { + return + } + c.Response.OK( + g, + gin.H{"enabled": isEnabled}, + ) +} + +// DisableTOTP disables TOTP +func (c *User) DisableTOTP(g *gin.Context) { + _, user, ok := c.handleSession(g) + if !ok { + return + } + // parse request + var request UserDisableTOTPRequest + if ok := c.handleParseRequest(g, &request); !ok { + return + } + token, err := vo.NewString64(request.Token) + if err != nil { + c.Logger.Debugw("failed to create token", + "error", err, + ) + c.Response.ValidationFailed(g, "Token", err) + return + } + // check TOTP + userID := user.ID.MustGet() + err = c.UserService.CheckTOTP( + g.Request.Context(), + &userID, + token, + ) + if err != nil { + if errors.Is(err, errs.ErrUserWrongTOTP) { + c.Response.BadRequestMessage(g, "Invalid token") + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + } + // disable TOTP + err = c.UserService.DisableTOTP( + g.Request.Context(), + &userID, + ) + // handle response + if err != nil { + if errors.Is(err, errs.ErrUserWrongTOTP) { + c.Response.BadRequestMessage(g, "Invalid token") + return + } + if ok := c.handleErrors(g, err); !ok { + return + } + } + c.Response.OK( + g, + "TOTP disabled", + ) +} + +// VerifyTOTP verifies a TOTP +func (c *User) VerifyTOTP(g *gin.Context) { + _, user, ok := c.handleSession(g) + if !ok { + return + } + // parse req + var req UserVerifyTOTPRequest + if ok := c.handleParseRequest(g, &req); !ok { + return + } + totp, err := vo.NewString64(req.TOTP) + if err != nil { + c.Logger.Debugw("failed to create TOTP", + "error", err, + ) + c.Response.ValidationFailed(g, "TOTP", err) + return + } + // verify TOTP + userID := user.ID.MustGet() + err = c.UserService.CheckTOTP( + g.Request.Context(), + &userID, + totp, + ) + if errors.Is(err, errs.ErrUserWrongTOTP) { + c.Response.BadRequestMessage(g, "Invalid token") + return + } + // handle response + if ok := c.handleErrors(g, err); !ok { + return + } + c.Response.OK( + g, + 
"TOTP verified", + ) +} diff --git a/backend/controller/utils.go b/backend/controller/utils.go new file mode 100644 index 0000000..55cf231 --- /dev/null +++ b/backend/controller/utils.go @@ -0,0 +1,358 @@ +package controller + +import ( + "bytes" + "encoding/csv" + "fmt" + "io" + "mime" + "path/filepath" + "strings" + "time" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/api" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// Common is a common controller base struct it holds common operations on the +// common dependencies +type Common struct { + Response api.JSONResponseHandler + Logger *zap.SugaredLogger + SessionService *service.Session +} + +// handleSession handles the session and returns the session and user +// if the session is not valid, a 401 response is sent +func (c *Common) handleSession( + g *gin.Context, +) (*model.Session, *model.User, bool) { + s, ok := g.Get("session") + if !ok { + c.Logger.Debug("session not found in context") + c.Response.Unauthorized(g) + return nil, nil, false + } + session, ok := s.(*model.Session) + if !ok { + c.Logger.Error("session in context is not of type model.Session") + c.Response.Unauthorized(g) + return nil, nil, false + } + user := session.User + if user == nil { + c.Logger.Error("user not found in session") + c.Response.Unauthorized(g) + return nil, nil, false + } + return session, user, true +} + +// HandleParseRequest parses the request and returns true if successful +// if the request is not parsable, a 400 response is sent +func (c *Common) handleParseRequest( + g *gin.Context, + req any, +) bool { + body, err := io.ReadAll(g.Request.Body) + if err != nil { + c.Logger.Debugw("failed to read request body", + "error", err, + ) + c.Response.BadRequest(g) + return false + } + if err := utils.Unmarshal(body, &req); err != nil { + c.Logger.Debugw("failed to parse request", + "error", err, + ) + c.Response.BadRequestMessage(g, err.Error()) + return false + } + return true +} + +// handleParseIDParam parses the id parameter from the request +// and returns it if successful +// if the id is not parsable, a 400 response is sent +func (c *Common) handleParseIDParam( + g *gin.Context, +) (*uuid.UUID, bool) { + id, err := uuid.Parse(g.Param("id")) + if err != nil { + c.Logger.Debugw("failed to parse id", + "error", err, + ) + c.Response.BadRequestMessage(g, errs.MsgFailedToParseUUID) + return nil, false + } + return &id, true +} + +// handlePagination parses the pagination from the request and returns it +// if the pagination is not valid, a 400 response is sent +func (c *Common) handlePagination( + g *gin.Context, +) (*vo.Pagination, bool) { + pagination, err := vo.NewPaginationFromRequest(g) + if err != nil { + c.Logger.Debugw("invalid offset or limit", + "error", err, + ) + c.Response.ValidationFailed(g, "pagination", err) + return nil, false + } + return pagination, true +} + +// handleQueryArgs parses the query from the request and returns it +func (c *Common) handleQueryArgs(g *gin.Context) (*vo.QueryArgs, bool) { + q, err := vo.QueryFromRequest(g) + if err != nil { + c.Logger.Debugw("failed to parse query", + "error", err, + ) + c.Response.ValidationFailed(g, "query args", err) + return nil, false + } + return q, true +} + +// 
handleErrors is a helper function to handle common handleErrors +// it most often checks for more than what is needed, but is +// useful to avoid missing any error handling and saving time +// it returns true if no errors are found +// it returns false if an error is found and a response is sent +func (c *Common) handleErrors( + g *gin.Context, + err error, +) bool { + if err != nil { + if ok := handleAuthorizationError(g, c.Response, err); !ok { + c.Logger.Debugw("authorization error", + "auth_error", err, + ) + return false + } + if ok := handleValidationError(g, c.Response, err); !ok { + c.Logger.Debugw("validation error", + "validation_error", err, + ) + return false + } + if ok := handleCustomError(g, c.Response, err); !ok { + c.Logger.Debugw("custom error", + "custom_error", err, + ) + return false + } + if ok := handleDBRowNotFound(g, c.Response, err); !ok { + c.Logger.Debugw("DB row not found error", + "error", err, + ) + return false + } + c.Logger.Errorw("API unknown error type", "error", err) + _ = handleServerError(g, c.Response, err) + return false + } + return true +} + +// responseWithCSV +func (c *Common) responseWithCSV( + g *gin.Context, + buffer *bytes.Buffer, + writer *csv.Writer, + name string, +) { + writer.Flush() + if err := writer.Error(); err != nil { + c.handleErrors(g, err) + return + } + // Set CSV response headers + setSecureContentDisposition(g, name) + g.Header("Content-Type", "text/csv") + g.Header("Content-Length", fmt.Sprint(buffer.Len())) + + // Write the CSV buffer to the response + _, err := g.Writer.Write(buffer.Bytes()) + if err != nil { + c.handleErrors(g, err) + } +} + +// responseWithZIP +func (c *Common) responseWithZIP( + g *gin.Context, + buffer *bytes.Buffer, + name string, +) { + g.Header("Content-Type", "application/zip") + setSecureContentDisposition(g, name) + g.Header("Content-Transfer-Encoding", "binary") + g.Header("Expires", "0") + g.Header("Cache-Control", "must-revalidate") + g.Header("Pragma", "public") + g.Header("Content-Length", fmt.Sprintf("%d", buffer.Len())) + + _, err := g.Writer.Write(buffer.Bytes()) + if err != nil { + c.handleErrors(g, err) + } +} + +// companyIDFromRequestQuery returns the companyID as a UUID from the query +// or nil if not found +func companyIDFromRequestQuery(g *gin.Context) *uuid.UUID { + companyID := g.Query("companyID") + if companyID != "" { + cid, err := uuid.Parse(companyID) + if err != nil { + return nil + } + return &cid + } + return nil +} + +// SetSessionInGinContext sets the session in the gin context +func SetSessionInGinContext(c *gin.Context, s *model.Session) { + c.Set("session", s) +} + +// handleDBRowNotFound checks if the error is a not found error +// if it is, a 404 response is sent +// if it is not, true is returned +func handleDBRowNotFound( + g *gin.Context, + responseHandler api.JSONResponseHandler, + err error, +) bool { + if errors.Is(err, gorm.ErrRecordNotFound) { + // error is logged in service + _ = err + responseHandler.NotFound(g) + return false + } + return true +} + +// handleAuthorizationError checks if the error is an authorization error +// if it is, a 403 response is sent +// if it is not, true is returned +func handleAuthorizationError( + g *gin.Context, + responseHandler api.JSONResponseHandler, + err error, +) bool { + if errors.Is(err, errs.ErrAuthorizationFailed) { + // error is logged in service + _ = err + responseHandler.Forbidden(g) + return false + } + return true +} + +// handleValidationError checks if the error is a validation error +// if it is, a 400 
response is sent +// if it is not, true is returned +func handleValidationError( + g *gin.Context, + responseHandler api.JSONResponseHandler, + err error, +) bool { + if errors.As(err, &errs.ValidationError{}) { + // error is logged in service + _ = err + responseHandler.BadRequestMessage(g, err.Error()) + return false + } + return true +} + +// handleCustomError checks if the error is a custom error +// if it is a 400 response is sent +// if it is not, true is returned +func handleCustomError( + g *gin.Context, + responseHandler api.JSONResponseHandler, + err error, +) bool { + if errors.As(err, &errs.CustomError{}) { + // error is logged in service + _ = err + responseHandler.BadRequestMessage(g, err.Error()) + return false + } + return true +} + +// handleServerError checks if the error is a server error +// if it is, a 500 response is sent +// if it is not, true is returned +func handleServerError( + g *gin.Context, + responseHandler api.JSONResponseHandler, + err error, +) bool { + if err != nil { + // error is logged in service + _ = err + responseHandler.ServerError(g) + return false + } + return true +} + +func setSecureContentDisposition(c *gin.Context, filename string) { + // Strip any directory components + filename = filepath.Base(filename) + + // Remove any potentially problematic characters + filename = strings.Map(func(r rune) rune { + // Keep only alphanumeric, space, dash, underscore and dot + if (r >= 'a' && r <= 'z') || + (r >= 'A' && r <= 'Z') || + (r >= '0' && r <= '9') || + (r == ' ' || r == '-' || r == '_' || r == '.') { + return r + } + return -1 + }, filename) + + // Ensure we still have a valid filename + if filename == "" || filename == "." || filename == ".." { + filename = time.Now().UTC().Format("20060102150405") + } + + // Properly encode the filename for Content-Disposition + encodedFilename := mime.QEncoding.Encode("utf-8", filename) + + c.Header("Content-Disposition", + fmt.Sprintf(`attachment; filename="%s";`, + encodedFilename, + ), + ) +} + +func (c *Common) requiresFlag(g *gin.Context, featureFlag string) { + // handle session + _, _, ok := c.handleSession(g) + if !ok { + return + } + c.Response.ServerErrorMessage(g, "requires "+featureFlag+" edition") +} diff --git a/backend/controller/version.go b/backend/controller/version.go new file mode 100644 index 0000000..ebca93b --- /dev/null +++ b/backend/controller/version.go @@ -0,0 +1,26 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/service" +) + +// Version is a controller +type Version struct { + Common + versionService *service.Version +} + +// Get application version +func (c *Version) Get(g *gin.Context) { + // handle session + session, _, ok := c.handleSession(g) + if !ok { + return + } + version, err := c.versionService.Get(g.Request.Context(), session) + if ok := handleServerError(g, c.Response, err); !ok { + return + } + c.Response.OK(g, version) +} diff --git a/backend/controller/webhook.go b/backend/controller/webhook.go new file mode 100644 index 0000000..df39250 --- /dev/null +++ b/backend/controller/webhook.go @@ -0,0 +1,171 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// WebhookColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to 
the database schema +// this is tied to a slice in the repository package +var WebhookColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.WEBHOOK_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.WEBHOOK_TABLE, "updated_at"), + "name": repository.TableColumn(database.WEBHOOK_TABLE, "name"), +} + +// Webhook is a controller +type Webhook struct { + Common + WebhookService *service.Webhook +} + +// Create creates a new webhook +func (w *Webhook) Create(g *gin.Context) { + session, _, ok := w.handleSession(g) + if !ok { + return + } + // parse request + var req model.Webhook + if ok := w.handleParseRequest(g, &req); !ok { + return + } + // save webhook + id, err := w.WebhookService.Create(g.Request.Context(), session, &req) + // handle response + if ok := w.handleErrors(g, err); !ok { + return + } + w.Response.OK( + g, + gin.H{ + "id": id.String(), + }, + ) +} + +// GetAll gets the webhooks +func (w *Webhook) GetAll(g *gin.Context) { + session, _, ok := w.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := w.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + companyID := companyIDFromRequestQuery(g) + // get + webhooks, err := w.WebhookService.GetAll( + g.Request.Context(), + session, + companyID, + &repository.WebhookOption{ + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := w.handleErrors(g, err); !ok { + return + } + w.Response.OK( + g, + webhooks, + ) +} + +// GetByID gets a webhook by id +func (w *Webhook) GetByID(g *gin.Context) { + session, _, ok := w.handleSession(g) + if !ok { + return + } + // parse request + id, ok := w.handleParseIDParam(g) + if !ok { + return + } + // get + webhook, err := w.WebhookService.GetByID( + g.Request.Context(), + session, + id, + ) + // handle response + if ok := w.handleErrors(g, err); !ok { + return + } + w.Response.OK(g, webhook) +} + +// Update updates a webhook +func (w *Webhook) UpdateByID(g *gin.Context) { + session, _, ok := w.handleSession(g) + if !ok { + return + } + // parse request + id, ok := w.handleParseIDParam(g) + if !ok { + return + } + + var req model.Webhook + if ok := w.handleParseRequest(g, &req); !ok { + return + } + // save + err := w.WebhookService.Update(g.Request.Context(), session, id, &req) + // handle response + if ok := w.handleErrors(g, err); !ok { + return + } + w.Response.OK(g, nil) +} + +// DeleteByID deletes a webhook by id +func (w *Webhook) DeleteByID(g *gin.Context) { + session, _, ok := w.handleSession(g) + if !ok { + return + } + // parse request + id, ok := w.handleParseIDParam(g) + if !ok { + return + } + // delete + err := w.WebhookService.DeleteByID(g, session, id) + // handle response + if ok := w.handleErrors(g, err); !ok { + return + } + w.Response.OK(g, nil) +} + +// SendTest sends a test webhook +func (w *Webhook) SendTest(g *gin.Context) { + session, _, ok := w.handleSession(g) + if !ok { + return + } + // parse request + id, ok := w.handleParseIDParam(g) + if !ok { + return + } + // send + data, err := w.WebhookService.SendTest(g.Request.Context(), session, id) + // handle response + if ok := w.handleErrors(g, err); !ok { + return + } + w.Response.OK(g, data) +} diff --git a/backend/data/assets.go b/backend/data/assets.go new file mode 100644 index 0000000..74051a6 --- /dev/null +++ b/backend/data/assets.go @@ -0,0 +1,4 @@ +package data + +const ASSET_GLOBAL_FOLDER = "shared" +const ATTACHMENT_GLOBAL_FOLDER = "shared" diff --git a/backend/data/events.go b/backend/data/events.go new 
file mode 100644 index 0000000..b01ca8c --- /dev/null +++ b/backend/data/events.go @@ -0,0 +1,36 @@ +package data + +const ( + EVENT_CAMPAIGN_SCHEDULED = "campaign_scheduled" + EVENT_CAMPAIGN_ACTIVE = "campaign_active" + EVENT_CAMPAIGN_SELF_MANAGED = "campaign_self_managed" + EVENT_CAMPAIGN_CLOSED = "campaign_closed" + + EVENT_CAMPAIGN_RECIPIENT_SCHEDULED = "campaign_recipient_scheduled" + EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT = "campaign_recipient_message_sent" + EVENT_CAMPAIGN_RECIPIENT_MESSAGE_FAILED = "campaign_recipient_message_failed" + EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ = "campaign_recipient_message_read" + EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED = "campaign_recipient_before_page_visited" + EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED = "campaign_recipient_page_visited" + EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED = "campaign_recipient_after_page_visited" + EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA = "campaign_recipient_submitted_data" + EVENT_CAMPAIGN_RECIPIENT_CANCELLED = "campaign_recipient_cancelled" +) + +var Events = []string{ + // campaign events + EVENT_CAMPAIGN_SCHEDULED, + EVENT_CAMPAIGN_ACTIVE, + EVENT_CAMPAIGN_SELF_MANAGED, + EVENT_CAMPAIGN_CLOSED, + // campaign recipient events + EVENT_CAMPAIGN_RECIPIENT_SCHEDULED, + EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT, + EVENT_CAMPAIGN_RECIPIENT_MESSAGE_FAILED, + EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ, + EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED, + EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED, + EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED, + EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA, + EVENT_CAMPAIGN_RECIPIENT_CANCELLED, +} diff --git a/backend/data/install.go b/backend/data/install.go new file mode 100644 index 0000000..233bddf --- /dev/null +++ b/backend/data/install.go @@ -0,0 +1,15 @@ +package data + +const ( + DefaultAdminCertDir = "certs/admin" + DefaultAdminPublicCertFileName = "public.pem" + DefaultAdminPrivateCertFileName = "private.pem" +) + +const ( + // default admin user + DefaultSacrificalAccountUsername = "admin" + DefaultSacrificalAccountName = "admin" + DefaultSacrificalAccountEmail = "admin@localhost.invalid" // RFC 2606 + DefaultSacrificalCompanyName = "company" +) diff --git a/backend/data/messageType.go b/backend/data/messageType.go new file mode 100644 index 0000000..fb51c93 --- /dev/null +++ b/backend/data/messageType.go @@ -0,0 +1,6 @@ +package data + +const ( + MESSAGE_TYPE_EMAIL = "email" + MESSAGE_TYPE_SMS = "sms" +) diff --git a/backend/data/option.go b/backend/data/option.go new file mode 100644 index 0000000..8aa8826 --- /dev/null +++ b/backend/data/option.go @@ -0,0 +1,26 @@ +package data + +const ( + OptionKeyIsInstalled = "is_installed" + OptionValueIsInstalled = "true" + OptionValueIsNotInstalled = "false" + // KeyIsInstalled is the key for the is_installed option + OptionKeyInstanceID = "instance_id" + + OptionKeyLogLevel = "log_level" + OptionKeyDBLogLevel = "db_log_level" + + OptionKeyUsingSystemd = "systemd_install" + OptionValueUsingSystemdYes = "true" + OptionValueUsingSystemdNo = "false" + + OptionKeyDevelopmentSeeded = "development_seeded" + OptionValueSeeded = "true" + + OptionKeyMaxFileUploadSizeMB = "max_file_upload_size_mb" + OptionValueKeyMaxFileUploadSizeMBDefault = "100" + + OptionKeyRepeatOffenderMonths = "repeat_offender_months" + + OptionKeyAdminSSOLogin = "sso_login" +) diff --git a/backend/data/pageType.go b/backend/data/pageType.go new file mode 100644 index 0000000..9c3a549 --- /dev/null +++ b/backend/data/pageType.go @@ -0,0 +1,8 @@ +package data + +const ( + PAGE_TYPE_BEFORE = 
"before" + PAGE_TYPE_LANDING = "landing" + PAGE_TYPE_AFTER = "after" + PAGE_TYPE_DONE = "done" +) diff --git a/backend/data/permissions.go b/backend/data/permissions.go new file mode 100644 index 0000000..b77c843 --- /dev/null +++ b/backend/data/permissions.go @@ -0,0 +1,6 @@ +package data + +const ( + // PERMISSION_ALLOW_GLOBAL allows all permissions, it is the god mode of permissions + PERMISSION_ALLOW_GLOBAL = "*" +) diff --git a/backend/data/roles.go b/backend/data/roles.go new file mode 100644 index 0000000..e5796d7 --- /dev/null +++ b/backend/data/roles.go @@ -0,0 +1,26 @@ +package data + +// This is name key for the different roles +const ( + // RoleSystem is the system role + // is is reserved for system actions only + RoleSystem = "system" + // RoleSuperAdministrator is the super administrator role + // this role has access to everything a user can do + RoleSuperAdministrator = "superadministrator" + // RoleCompanyAdministrator is the company role + // this role had read access to their associated company + RoleCompanyUser = "companyuser" +) + +// RolePermissions is a map of roles to their permissions +// these are the roles and their permissions +var RolePermissions = map[string][]string{ + RoleSystem: { + PERMISSION_ALLOW_GLOBAL, + }, + RoleSuperAdministrator: { + PERMISSION_ALLOW_GLOBAL, + }, + RoleCompanyUser: {}, +} diff --git a/backend/data/session.go b/backend/data/session.go new file mode 100644 index 0000000..a3486e5 --- /dev/null +++ b/backend/data/session.go @@ -0,0 +1,8 @@ +package data + +const SessionCookieKey = "session" +const APIHeaderKey = "x-API" +const RequestAPISessionKey = "apiSession" + +const SystemSessionID = "00000000-0000-0111-0777-000000000000" +const APISessionID = "00000000-0000-0100-0000-000000000000" diff --git a/backend/data/urls.go b/backend/data/urls.go new file mode 100644 index 0000000..1232f92 --- /dev/null +++ b/backend/data/urls.go @@ -0,0 +1,11 @@ +package data + +import "github.com/phishingclub/phishingclub/build" + +// GetCrmURL returns the URL for the CRM system depending on the environment +func GetCrmURL() string { + if build.Flags.Production { + return "https://user.phishing.club" + } + return "https://crm:8009" +} diff --git a/backend/database/allowDeny.go b/backend/database/allowDeny.go new file mode 100644 index 0000000..16b87a9 --- /dev/null +++ b/backend/database/allowDeny.go @@ -0,0 +1,33 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + ALLOW_DENY_TABLE = "allow_denies" +) + +// AllowDeny is a gorm data model for allow deny listing +type AllowDeny struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + CompanyID *uuid.UUID `gorm:"uniqueIndex:idx_allow_denies_unique_name_and_company_id;type:uuid"` + Name string `gorm:"not null;uniqueIndex:idx_allow_denies_unique_name_and_company_id;"` + Cidrs string `gorm:"not null;"` + Allowed bool `gorm:"not null;"` +} + +func (AllowDeny) TableName() string { + return ALLOW_DENY_TABLE +} + +func (e *AllowDeny) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "allow_denies") +} diff --git a/backend/database/apiSender.go b/backend/database/apiSender.go new file mode 100644 index 0000000..5390471 --- /dev/null +++ b/backend/database/apiSender.go @@ -0,0 +1,48 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const 
( + API_SENDER_TABLE = "api_senders" +) + +type APISender struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + Name string `gorm:"not null;uniqueIndex:idx_api_senders_name_company_id;"` + CompanyID *uuid.UUID `gorm:"uniqueIndex:idx_api_senders_name_company_id;type:uuid"` + + // Extra fields + APIKey string + CustomField1 string + CustomField2 string + CustomField3 string + CustomField4 string + + // Request fields + RequestMethod string + RequestURL string + RequestHeaders string + RequestBody string + + // Response fields + ExpectedResponseStatusCode int + ExpectedResponseHeaders string + ExpectedResponseBody string +} + +func (e *APISender) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + null company id is unique + return UniqueIndexNameAndNullCompanyID(db, "api_senders") +} + +func (APISender) TableName() string { + return API_SENDER_TABLE +} diff --git a/backend/database/apiSenderHeader.go b/backend/database/apiSenderHeader.go new file mode 100644 index 0000000..98561b3 --- /dev/null +++ b/backend/database/apiSenderHeader.go @@ -0,0 +1,26 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +type APISenderHeader struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + + Key string `gorm:"not null;"` + Value string `gorm:"not null;"` + // IsRequestHeader is true if the header is a request header + // and false if it is a expected response header + IsRequestHeader bool `gorm:"not null;"` + + // belongs to + APISenderID *uuid.UUID `gorm:"index;not null;type:uuid"` +} + +func (APISenderHeader) TableName() string { + return "api_sender_headers" +} diff --git a/backend/database/asset.go b/backend/database/asset.go new file mode 100644 index 0000000..8656742 --- /dev/null +++ b/backend/database/asset.go @@ -0,0 +1,33 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + ASSET_TABLE = "assets" +) + +// Asset is gorm data model +type Asset struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + + // has one + DomainID *uuid.UUID `gorm:"index;type:uuid;"` + DomainName string + + // can has one + CompanyID *uuid.UUID `gorm:"index;type:uuid;"` + + Name string `gorm:";index"` + Description string `gorm:";"` + Path string `gorm:"not null;index"` +} + +func (Asset) TableName() string { + return ASSET_TABLE +} diff --git a/backend/database/attachment.go b/backend/database/attachment.go new file mode 100644 index 0000000..834853c --- /dev/null +++ b/backend/database/attachment.go @@ -0,0 +1,33 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + ATTACHMENT_TABLE = "attachments" +) + +// Attachment is gorm data model +type Attachment struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + + // can has one + CompanyID *uuid.UUID `gorm:"index;type:uuid;"` + + // many to many + Mails []Email `gorm:"many2many:message_attachments;"` + + Name string `gorm:";index"` + Description string `gorm:";"` + Filename string `gorm:"not null;index"` + EmbeddedContent bool `gorm:"not null;default:false;index"` +} + +func (Attachment) TableName() string { + return 
ATTACHMENT_TABLE +} diff --git a/backend/database/campaign.go b/backend/database/campaign.go new file mode 100644 index 0000000..18a901c --- /dev/null +++ b/backend/database/campaign.go @@ -0,0 +1,76 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + CAMPAIGN_TABLE = "campaigns" +) + +// Campaign is gorm data model +type Campaign struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + + CloseAt *time.Time `gorm:"index;"` + ClosedAt *time.Time `gorm:"index;"` + AnonymizeAt *time.Time `gorm:"index;"` + AnonymizedAt *time.Time `gorm:"index;"` + SortField string `gorm:";"` + SortOrder string `gorm:";"` // 'asc,desc,random' + SendStartAt *time.Time `gorm:"index;"` + SendEndAt *time.Time `gorm:"index;"` + + // ConstraintWeekDays is a binary format. + // 0b00000001 = 1 = sunday + // 0b00000010 = 2 = monday + // 0b00000100 = 4 = tuesday + // 0b00001000 = 8 = ... + // 0b00010000 = 16 = + // 0b00100000 = 32 = + // 0b01000000 = 64 = + ConstraintWeekDays *int `gorm:";"` + // hh:mm + ConstraintStartTime *string `gorm:"index;"` + // hh:mm + ConstraintEndTime *string `gorm:"index;"` + SaveSubmittedData bool `gorm:"not null;default:false"` + IsAnonymous bool `gorm:"not null;default:false"` + IsTest bool `gorm:"not null;default:false"` + + // has one + CampaignTemplateID *uuid.UUID `gorm:"index;type:uuid;"` + CampaignTemplate *CampaignTemplate + + // can has one + CompanyID *uuid.UUID `gorm:"index;type:uuid;index;uniqueIndex:idx_campaigns_unique_name_and_company_id;"` + Company *Company + DenyPageID *uuid.UUID `gorm:"type:uuid;index;"` + DenyPage *Page `gorm:"foreignKey:DenyPageID;references:ID"` + // NotableEventID notable event for this campaign + NotableEvent *Event `gorm:"foreignKey:NotableEventID;references:ID"` + NotableEventID *uuid.UUID `gorm:"type:uuid;index"` + + WebhookID *uuid.UUID `gorm:"type:uuid;index;"` + + // has many-to-many + RecipientGroups []*RecipientGroup `gorm:"many2many:campaign_recipient_groups"` + AllowDeny []*AllowDeny `gorm:"many2many:campaign_allow_denies"` + + Name string `gorm:"not null;uniqueIndex:idx_campaigns_unique_name_and_company_id"` +} + +func (c *Campaign) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "campaigns") +} + +func (Campaign) TableName() string { + return CAMPAIGN_TABLE +} diff --git a/backend/database/campaignAllowDeny.go b/backend/database/campaignAllowDeny.go new file mode 100644 index 0000000..ea552b4 --- /dev/null +++ b/backend/database/campaignAllowDeny.go @@ -0,0 +1,22 @@ +package database + +import ( + "github.com/google/uuid" +) + +const ( + CAMPAIGN_ALLOW_DENY_TABLE = "campaign_allow_denies" +) + +// CampaignAllowDeny is a gorm data model +// is a table of those allow deny lists that belong to a campaign +type CampaignAllowDeny struct { + CampaignID *uuid.UUID `gorm:"not null;index;type:uuid;uniqueIndex:idx_campaign_allow_denies;"` + Campaign *Campaign + AllowDenyID *uuid.UUID `gorm:"not null;index;type:uuid;uniqueIndex:idx_campaign_allow_denies;"` + AllowDeny *AllowDeny +} + +func (CampaignAllowDeny) TableName() string { + return CAMPAIGN_ALLOW_DENY_TABLE +} diff --git a/backend/database/campaignEvent.go b/backend/database/campaignEvent.go new file mode 100644 index 0000000..aac4e67 --- /dev/null +++ b/backend/database/campaignEvent.go @@ -0,0 +1,52 @@ +package database + +import ( + "reflect" + "time" 
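// Note on Campaign.ConstraintWeekDays above: the mask is the sum of the day
// bits, so e.g. Monday (2) + Wednesday (8) = 10 restricts sending to those two
// days. A minimal check sketch (illustrative only; the helper name is an
// assumption, not part of the application):
//
//	func weekdayAllowed(mask int, t time.Time) bool {
//		// time.Sunday == 0, matching the 0b00000001 bit above
//		return mask&(1<<int(t.Weekday())) != 0
//	}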
+ + "github.com/google/uuid" +) + +const ( + CAMPAIGN_EVENT_TABLE = "campaign_events" +) + +// Campaign is gorm data model +type CampaignEvent struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;"` + + // arbitrary data + Data string `gorm:"not null;"` + + // has one + CampaignID *uuid.UUID `gorm:"not null;index;type:uuid;"` + EventID *uuid.UUID `gorm:"not null;index;type:uuid;"` + + // can has one + UserAgent string `gorm:";"` + IPAddress string `gorm:";"` + + // AnonymizedID is set when the recipient has been anonymized + AnonymizedID *uuid.UUID `gorm:"type:uuid;index;"` + // if null either the event has no recipient or the recipient has been anonymized + RecipientID *uuid.UUID `gorm:"index;type:uuid;"` + Recipient *Recipient + + CompanyID *uuid.UUID `gorm:"index;type:uuid;index;"` +} + +// RecipientCampaignEvent is a aggregated read-only model +type RecipientCampaignEvent struct { + CampaignEvent + + Name string // event name + CampaignName string +} + +func (CampaignEvent) TableName() string { + return CAMPAIGN_EVENT_TABLE +} + +var _ = reflect.TypeOf(RecipientCampaignEvent{}) diff --git a/backend/database/campaignRecipient.go b/backend/database/campaignRecipient.go new file mode 100644 index 0000000..fb29e68 --- /dev/null +++ b/backend/database/campaignRecipient.go @@ -0,0 +1,52 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + CAMPAIGN_RECIPIENT_TABLE_NAME = "campaign_recipients" +) + +// CampaigReciever is gorm data model +// this model/table is primarily used to keep track of who and when should recieve a campaign +type CampaignRecipient struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + + Campaign *Campaign + CampaignID *uuid.UUID `gorm:"not null;type:uuid;uniqueIndex:idx_campaign_recipients_campaign_id_recipient_id;"` + + // CancelledAt *time.Time `gorm:"index;"` + CancelledAt *time.Time `gorm:"index;"` + + // when it should be send + SendAt *time.Time `gorm:"index;"` + + // when it was last attempted send + LastAttemptAt *time.Time `gorm:"index;"` + + // when it was sent + SentAt *time.Time `gorm:"index;"` + + // self-managed + SelfManaged bool `gorm:"not null;default:false;"` + + // AnonymizedID is set when the recipient has been anonymized + AnonymizedID *uuid.UUID `gorm:"type:uuid;"` + Recipient *Recipient + // A null recipientID means that the data has been anonymized + RecipientID *uuid.UUID `gorm:"type:uuid;index;uniqueIndex:idx_campaign_recipients_campaign_id_recipient_id;"` + + // NotableEventID is the most notable event for this recipient + NotableEvent *Event `gorm:"foreignKey:NotableEventID;references:ID"` + NotableEventID *uuid.UUID `gorm:"type:uuid;index"` +} + +func (CampaignRecipient) TableName() string { + return CAMPAIGN_RECIPIENT_TABLE_NAME +} diff --git a/backend/database/campaignRecipientGroup.go b/backend/database/campaignRecipientGroup.go new file mode 100644 index 0000000..947757d --- /dev/null +++ b/backend/database/campaignRecipientGroup.go @@ -0,0 +1,19 @@ +package database + +import ( + "github.com/google/uuid" +) + +// CampaignRecipientGroup is gorm data model +// is a table of those recipient groups that belong to a campaign +type CampaignRecipientGroup struct { + CampaignID *uuid.UUID `gorm:"not null;index;type:uuid;uniqueIndex:idx_campaign_recipient_group;"` + Campaign *Campaign + + 
RecipientGroupID *uuid.UUID `gorm:"not null;index;type:uuid;uniqueIndex:idx_campaign_recipient_group;"` + RecipientGroup *RecipientGroup +} + +func (CampaignRecipientGroup) TableName() string { + return "campaign_recipient_groups" +} diff --git a/backend/database/campaignStats.go b/backend/database/campaignStats.go new file mode 100644 index 0000000..965dc1b --- /dev/null +++ b/backend/database/campaignStats.go @@ -0,0 +1,51 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + CAMPAIGN_STATS_TABLE = "campaign_stats" +) + +// CampaignStats is gorm data model for aggregated campaign statistics +type CampaignStats struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid" json:"id"` + CreatedAt *time.Time `gorm:"not null;index;" json:"createdAt"` + UpdatedAt *time.Time `gorm:"not null;" json:"updatedAt"` + + // Campaign reference + CampaignID *uuid.UUID `gorm:"not null;unique;index;type:uuid;" json:"campaignId"` + CampaignName string `gorm:"not null;" json:"campaignName"` + CompanyID *uuid.UUID `gorm:"index;type:uuid;" json:"companyId"` // nullable for global campaigns + + // Time metrics + CampaignStartDate *time.Time `gorm:"index;" json:"campaignStartDate"` + CampaignEndDate *time.Time `gorm:"index;" json:"campaignEndDate"` + CampaignClosedAt *time.Time `gorm:"index;" json:"campaignClosedAt"` + + // Volume metrics + TotalRecipients int `gorm:"not null;default:0" json:"totalRecipients"` + TotalEvents int `gorm:"not null;default:0" json:"totalEvents"` + + // Event type breakdowns + EmailsSent int `gorm:"not null;default:0" json:"emailsSent"` + TrackingPixelLoaded int `gorm:"not null;default:0" json:"trackingPixelLoaded"` // Email opens + WebsiteVisits int `gorm:"not null;default:0" json:"websiteVisits"` // Link clicks + DataSubmissions int `gorm:"not null;default:0" json:"dataSubmissions"` // Form submissions + + // Success rates (as percentages for quick display) + OpenRate float64 `gorm:"not null;default:0" json:"openRate"` + ClickRate float64 `gorm:"not null;default:0" json:"clickRate"` + SubmissionRate float64 `gorm:"not null;default:0" json:"submissionRate"` + + // Campaign metadata + TemplateName string `gorm:"" json:"templateName"` + CampaignType string `gorm:"" json:"campaignType"` // 'scheduled', 'self-managed' +} + +func (CampaignStats) TableName() string { + return CAMPAIGN_STATS_TABLE +} diff --git a/backend/database/campaignTemplate.go b/backend/database/campaignTemplate.go new file mode 100644 index 0000000..4fcb23d --- /dev/null +++ b/backend/database/campaignTemplate.go @@ -0,0 +1,72 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + CAMPAIGN_TEMPLATE_TABLE = "campaign_templates" +) + +// CampaignTemplate is gorm data model +type CampaignTemplate struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + + Name string `gorm:"not null;index;uniqueIndex:idx_campaign_templates_unique_name_and_company_id;"` + + URLPath string `gorm:"not null;default:'';index"` + + // IsUsable indicates if a template is usable based on if it has all the required + // data such as domainID, landingPage and etc to be used in a campaign + IsUsable bool `gorm:"not null;default:false;index"` + + // has-a + LandingPageID *uuid.UUID `gorm:"type:uuid;index;"` + LandingPage *Page `gorm:"references:LandingPage;foreignKey:LandingPageID;references:ID;"` + + DomainID *uuid.UUID `gorm:"type:uuid;index;"` + 
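	// Domain is the belongs-to side of DomainID; GORM resolves it through the
	// foreignKey tag. A hedged usage sketch (db, id and the preloaded fields
	// are assumptions for illustration):
	//
	//	var tmpl CampaignTemplate
	//	db.Preload("Domain").Preload("LandingPage").First(&tmpl, "id = ?", id)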
Domain *Domain `gorm:"foreignKey:DomainID"` + + URLIdentifierID *uuid.UUID `gorm:"not null;type:uuid;index"` + URLIdentifier *Identifier `gorm:"references:foreignKey:URLIdentifierID;references:ID"` + + StateIdentifierID *uuid.UUID `gorm:"type:uuid;index"` + StateIdentifier *Identifier `gorm:"references:foreignKey:StateIdentifierID;references:ID"` + + // has-a optional + BeforeLandingPageID *uuid.UUID `gorm:"type:uuid;index"` + BeforeLandingPage *Page `gorm:"foreignkey:BeforeLandingPageID;references:ID"` + + AfterLandingPageID *uuid.UUID `gorm:"type:uuid;index"` + AfterLandingPage *Page `gorm:"foreignKey:AfterLandingPageID;references:ID"` + + AfterLandingPageRedirectURL string `gorm:"not null;"` + + EmailID *uuid.UUID `gorm:"type:uuid;index;"` + Email *Email `gorm:"foreignKey:EmailID;references:ID;"` + + SMTPConfigurationID *uuid.UUID `gorm:"type:uuid;index;"` + SMTPConfiguration *SMTPConfiguration `gorm:"foreignKey:SMTPConfigurationID"` + + APISenderID *uuid.UUID `gorm:"type:uuid;index;"` + APISender *APISender `gorm:"foreignKey:APISenderID"` + + // can belong-to + CompanyID *uuid.UUID `gorm:"type:uuid;index;uniqueIndex:idx_campaign_templates_unique_name_and_company_id"` + Company *Company `gorm:"foreignKey:CompanyID"` +} + +func (e *CampaignTemplate) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "campaign_templates") +} + +func (CampaignTemplate) TableName() string { + return CAMPAIGN_TEMPLATE_TABLE +} diff --git a/backend/database/company.go b/backend/database/company.go new file mode 100644 index 0000000..efde0e7 --- /dev/null +++ b/backend/database/company.go @@ -0,0 +1,26 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + COMPANY_TABLE = "companies" +) + +type Company struct { + ID uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + Name string `gorm:"not null;unique;index"` + + // backref: many-to-one + Users []*User //`gorm:"foreignKey:CompanyID;"` + RecipientGroups []*RecipientGroup //`gorm:"foreignKey:CompanyID;"` +} + +func (Company) TableName() string { + return COMPANY_TABLE +} diff --git a/backend/database/domain.go b/backend/database/domain.go new file mode 100644 index 0000000..17ce013 --- /dev/null +++ b/backend/database/domain.go @@ -0,0 +1,32 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + DOMAIN_TABLE = "domains" +) + +// Domain is gorm data model +type Domain struct { + ID uuid.UUID `gorm:"primary_key;not null;unique;type:uuid;"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + CompanyID *uuid.UUID `gorm:"index;type:uuid;"` + Name string `gorm:"not null;unique;"` + ManagedTLSCerts bool `gorm:"not null;index;default:false"` + OwnManagedTLS bool `gorm:"not null;index;default:false"` + HostWebsite bool `gorm:"not null;"` + PageContent string + PageNotFoundContent string + RedirectURL string + // could has-one + Company *Company +} + +func (Domain) TableName() string { + return DOMAIN_TABLE +} diff --git a/backend/database/email.go b/backend/database/email.go new file mode 100644 index 0000000..c4a2893 --- /dev/null +++ b/backend/database/email.go @@ -0,0 +1,47 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + EMAIL_TABLE = "emails" +) + +// Email is a gorm data model +type Email struct { + ID *uuid.UUID 
`gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + Name string `gorm:"not null;index;uniqueIndex:idx_emails_name_company_id;"` + Content string `gorm:"not null;"` + + AddTrackingPixel bool `gorm:"not null;"` + + // mail fields + // Envelope header - Bounce / Return-Path + MailFrom string `gorm:"not null;"` + // Mail header + Subject string `gorm:"not null;"` + From string `gorm:"not null;"` + + // many to many + Attachments []*Attachment `gorm:"many2many:email_attachments;"` + + // can belong to + CompanyID *uuid.UUID `gorm:"index;type:uuid;uniqueIndex:idx_emails_name_company_id;"` + Company *Company +} + +func (e *Email) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + null company id is unique + return UniqueIndexNameAndNullCompanyID(db, "emails") +} + +func (Email) TableName() string { + return EMAIL_TABLE +} diff --git a/backend/database/emailAttachment.go b/backend/database/emailAttachment.go new file mode 100644 index 0000000..69da4e9 --- /dev/null +++ b/backend/database/emailAttachment.go @@ -0,0 +1,16 @@ +package database + +import ( + "github.com/google/uuid" +) + +// EmailAttachment is a gorm data model +// it is a many to many relationship between messages and attachments +type EmailAttachment struct { + EmailID *uuid.UUID `gorm:"primary_key;not null;index;type:uuid;unique_index:idx_message_attachment;"` + AttachmentID *uuid.UUID `gorm:"primary_key;not null;index;type:uuid;unique_index:idx_message_attachment;"` +} + +func (EmailAttachment) TableName() string { + return "email_attachments" +} diff --git a/backend/database/events.go b/backend/database/events.go new file mode 100644 index 0000000..7c10854 --- /dev/null +++ b/backend/database/events.go @@ -0,0 +1,21 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + EVENT_TABLE = "events" +) + +type Event struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + Name string `gorm:"not null;index;"` +} + +func (Event) TableName() string { + return EVENT_TABLE +} diff --git a/backend/database/factory.go b/backend/database/factory.go new file mode 100644 index 0000000..d97365e --- /dev/null +++ b/backend/database/factory.go @@ -0,0 +1,43 @@ +package database + +import ( + "fmt" + + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/errs" + "gorm.io/driver/sqlite" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +// FromConfig database factory from config +func FromConfig(conf config.Config) (*gorm.DB, error) { + var db *gorm.DB + switch conf.Database().Engine { + case config.DefaultAdministrationUseSqlite: + var err error + dsn := fmt.Sprintf( + "%s?_journal_mode=WAL&_busy_timeout=5000&_synchronous=NORMAL&_foreign_keys=ON", + conf.Database().DSN, + ) + db, err = gorm.Open(sqlite.Open(dsn), &gorm.Config{ + Logger: logger.Default.LogMode(logger.Silent), + }) + if err != nil { + return nil, errs.Wrap(err) + } + // SetMaxOpenConns sets the maximum number of open connections to the database. 
+ // without this, gorutines doing simultaneous db operations will cause + // "database is locked" error when using sqlite with a high concurrency + // this is because sqlite only allows one write operation at a time + // and locks the whole database for the duration any write operation + innerDB, err := db.DB() + if err != nil { + return nil, errs.Wrap(err) + } + innerDB.SetMaxIdleConns(1) + default: + return nil, config.ErrInvalidDatabase + } + return db, nil +} diff --git a/backend/database/identifiers.go b/backend/database/identifiers.go new file mode 100644 index 0000000..3415cb6 --- /dev/null +++ b/backend/database/identifiers.go @@ -0,0 +1,18 @@ +package database + +import ( + "github.com/google/uuid" +) + +const ( + IDENTIFIER_TABLE = "identifiers" +) + +type Identifier struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + Name string `gorm:"not null;uniqueIndex"` +} + +func (Identifier) TableName() string { + return IDENTIFIER_TABLE +} diff --git a/backend/database/option.go b/backend/database/option.go new file mode 100644 index 0000000..30c287f --- /dev/null +++ b/backend/database/option.go @@ -0,0 +1,16 @@ +package database + +import ( + "github.com/google/uuid" +) + +// Option is a database option (options stored in the database) +type Option struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + Key string `gorm:"not null;unique;index"` + Value string `gorm:"not null;"` +} + +func (Option) TableName() string { + return "options" +} diff --git a/backend/database/page.go b/backend/database/page.go new file mode 100644 index 0000000..1f37cfe --- /dev/null +++ b/backend/database/page.go @@ -0,0 +1,35 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + PAGE_TABLE = "pages" +) + +// Page is a gorm data model +type Page struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + CompanyID *uuid.UUID `gorm:"index;uniqueIndex:idx_pages_unique_name_and_company_id;type:uuid"` + Name string `gorm:"not null;index;uniqueIndex:idx_pages_unique_name_and_company_id;"` + Content string `gorm:"not null;"` + + // could has-one + Company *Company +} + +func (e *Page) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "pages") +} + +func (Page) TableName() string { + return PAGE_TABLE +} diff --git a/backend/database/recipient.go b/backend/database/recipient.go new file mode 100644 index 0000000..f04e1a8 --- /dev/null +++ b/backend/database/recipient.go @@ -0,0 +1,42 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + RECIPIENT_TABLE = "recipients" +) + +// Recipient is a gorm data model +type Recipient struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + DeletedAt *time.Time `gorm:"index;"` + + Email *string `gorm:";uniqueIndex"` + Phone *string `gorm:";index"` + ExtraIdentifier *string `gorm:";index"` + + FirstName string `gorm:";"` + LastName string `gorm:";"` + Position string `gorm:";"` + Department string `gorm:";"` + City string `gorm:";"` + Country string `gorm:";"` + Misc string `gorm:";"` + + // can belong to + CompanyID *uuid.UUID `gorm:"type:uuid;index;"` + Company *Company + + // many-to-many + Groups []RecipientGroup `gorm:"many2many:recipient_group_recipients;"` +} 
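// newExampleRecipient is an illustrative sketch only (the helper name and
// defaults are assumptions, not part of the application). It shows how the
// pointer-heavy fields of Recipient above are typically populated before the
// row is persisted.
func newExampleRecipient(email string, companyID *uuid.UUID) *Recipient {
	id := uuid.New()
	now := time.Now()
	return &Recipient{
		ID:        &id,
		CreatedAt: &now,
		UpdatedAt: &now,
		Email:     &email,
		CompanyID: companyID,
	}
}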
+ +func (Recipient) TableName() string { + return RECIPIENT_TABLE +} diff --git a/backend/database/recipientCampaignEventView.go b/backend/database/recipientCampaignEventView.go new file mode 100644 index 0000000..cf11ede --- /dev/null +++ b/backend/database/recipientCampaignEventView.go @@ -0,0 +1,9 @@ +package database + +// RecipientCampaignEventView is a view read-only model +type RecipientCampaignEventView struct { + CampaignEvent + + Name string // event name + CampaignName string +} diff --git a/backend/database/recipientGroup.go b/backend/database/recipientGroup.go new file mode 100644 index 0000000..20aa78e --- /dev/null +++ b/backend/database/recipientGroup.go @@ -0,0 +1,38 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + RECIPIENT_GROUP_TABLE = "recipient_groups" +) + +// RecipientGroup is a grouping of recipient +type RecipientGroup struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + + Name string `gorm:"not null;index;uniqueIndex:idx_recipient_groups_unique_name_and_company_id;"` + + // can belong-to + CompanyID *uuid.UUID `gorm:"type:uuid;index;uniqueIndex:idx_recipient_groups_unique_name_and_company_id"` + Company *Company + + // many-to-many + Recipients []Recipient `gorm:"many2many:recipient_group_recipients;"` +} + +func (e *RecipientGroup) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "recipient_groups") +} + +func (RecipientGroup) TableName() string { + return RECIPIENT_GROUP_TABLE +} diff --git a/backend/database/recipientGroupRecipient.go b/backend/database/recipientGroupRecipient.go new file mode 100644 index 0000000..8424b37 --- /dev/null +++ b/backend/database/recipientGroupRecipient.go @@ -0,0 +1,22 @@ +package database + +import ( + "github.com/google/uuid" +) + +const ( + RECIPIENT_GROUP_RECIPIENT_TABLE = "recipient_group_recipients" +) + +// RecipientGroupRecipient is a grouping of recipients and recipient groups +type RecipientGroupRecipient struct { + Recipient *Recipient + RecipientID *uuid.UUID `gorm:"not null;uniqueIndex:idx_recipient_group"` + + RecipientGroup *RecipientGroup + RecipientGroupID *uuid.UUID `gorm:"not null;uniqueIndex:idx_recipient_group"` +} + +func (RecipientGroupRecipient) TableName() string { + return RECIPIENT_GROUP_RECIPIENT_TABLE +} diff --git a/backend/database/role.go b/backend/database/role.go new file mode 100644 index 0000000..298e315 --- /dev/null +++ b/backend/database/role.go @@ -0,0 +1,18 @@ +package database + +import ( + "github.com/google/uuid" +) + +// Role is a role +type Role struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + Name string `gorm:"not null;index;unique;"` + + // one-to-many + Users []*User +} + +func (Role) TableName() string { + return "roles" +} diff --git a/backend/database/session.go b/backend/database/session.go new file mode 100644 index 0000000..fc0fac0 --- /dev/null +++ b/backend/database/session.go @@ -0,0 +1,32 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + SESSION_TABLE = "sessions" +) + +type Session struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index"` + UpdatedAt *time.Time `gorm:"not null;index"` + // IP address of the user when the session was created + IPAddress string `gorm:"not null;index;default:''"` + // the 
expiresAt is the time when the session will expire, nomatter the maxAgeAt + ExpiresAt *time.Time `gorm:"not null;index"` + // the maxAgeAt is the time when the session will expire, nomatter the expiresAt + MaxAgeAt *time.Time `gorm:"not null;index"` + // has-one + // + // belongs to + UserID string `gorm:";type:uuid;"` + User *User +} + +func (Session) TableName() string { + return SESSION_TABLE +} diff --git a/backend/database/smtpConfiguration.go b/backend/database/smtpConfiguration.go new file mode 100644 index 0000000..7764903 --- /dev/null +++ b/backend/database/smtpConfiguration.go @@ -0,0 +1,43 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + SMTP_CONFIGURATION_TABLE = "smtp_configurations" +) + +// SMTPConfiguration is a page gorm data model +// Simple Mail Transfer Protocol +type SMTPConfiguration struct { + ID uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + Name string `gorm:"not null;uniqueIndex:idx_smtp_configurations_unique_name_and_company_id;"` + Host string `gorm:"not null;"` + Port uint16 `gorm:"not null;"` + Username string `gorm:"not null;"` + Password string `gorm:"not null;"` + IgnoreCertErrors bool `gorm:"not null;"` + + // back-reference + Headers []*SMTPHeader + + // can belong-to + CompanyID *uuid.UUID `gorm:"uniqueIndex:idx_smtp_configurations_unique_name_and_company_id;"` + Company *Company `gorm:"foreignkey:CompanyID;"` +} + +func (e *SMTPConfiguration) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "smtp_configurations") +} + +func (SMTPConfiguration) TableName() string { + return SMTP_CONFIGURATION_TABLE +} diff --git a/backend/database/smtpHeader.go b/backend/database/smtpHeader.go new file mode 100644 index 0000000..b46c28d --- /dev/null +++ b/backend/database/smtpHeader.go @@ -0,0 +1,24 @@ +package database + +import ( + "time" + + "github.com/google/uuid" +) + +// SMTPHeader is headers sent with specific SMTP configurations +type SMTPHeader struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + Key string `gorm:"not null;"` + Value string `gorm:"not null;"` + + // belongs to + SMTPConfigurationID *uuid.UUID `gorm:"index;not null;type:uuid"` + SMTP *SMTPConfiguration `gorm:"foreignKey:SMTPConfigurationID"` +} + +func (SMTPHeader) TableName() string { + return "smtp_headers" +} diff --git a/backend/database/user.go b/backend/database/user.go new file mode 100644 index 0000000..fc6f924 --- /dev/null +++ b/backend/database/user.go @@ -0,0 +1,49 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + USER_TABLE = "users" +) + +// User is a database user +type User struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + DeletedAt gorm.DeletedAt `gorm:"index;"` + + Name string `gorm:"not null;"` + Username string `gorm:"not null;unique;"` + Email string `gorm:"unique;"` + PasswordHash string `gorm:"type:varchar(255);"` + RequirePasswordRenew bool `gorm:"default:false;"` + + // MFA + TOTPEnabled bool `gorm:"default:false;"` + TOTPSecret string + TOTPAuthURL string + // TODO rename to MFARecoveryCode + TOTPRecoveryCode string `gorm:"type:varchar(64);"` + + 
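	// The TOTP fields above pair with github.com/pquerna/otp (listed in
	// go.mod). A minimal verification sketch, assuming TOTPSecret holds the
	// plain base32 secret:
	//
	//	import "github.com/pquerna/otp/totp"
	//	ok := totp.Validate(userSuppliedCode, u.TOTPSecret)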
// SSO id + SSOID string + + // maybe has one + CompanyID *uuid.UUID `gorm:"type:uuid;index;"` + Company *Company + // has one + RoleID *uuid.UUID `gorm:"not null;type:uuid;index"` + Role *Role + // APIKey + APIKey string `gorm:"index"` +} + +func (User) TableName() string { + return USER_TABLE +} diff --git a/backend/database/utils.go b/backend/database/utils.go new file mode 100644 index 0000000..6188289 --- /dev/null +++ b/backend/database/utils.go @@ -0,0 +1,23 @@ +package database + +import ( + "fmt" + + "gorm.io/gorm" +) + +type Migrater interface { + Migrate(db *gorm.DB) error +} + +func UniqueIndexNameAndNullCompanyID(db *gorm.DB, tableName string) error { + // SQLITE / POSTGRES + // ensure name + null company id is unique + idx := fmt.Sprintf("CREATE UNIQUE INDEX IF NOT EXISTS idx_%s_name_null_company_id ON %s (name) WHERE (company_id IS NULL)", tableName, tableName) + res := db.Exec(idx) + if res.Error != nil { + return fmt.Errorf("error creating index: %v on table %s", res.Error, tableName) + } + + return nil +} diff --git a/backend/database/webhook.go b/backend/database/webhook.go new file mode 100644 index 0000000..bd1680b --- /dev/null +++ b/backend/database/webhook.go @@ -0,0 +1,33 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + WEBHOOK_TABLE = "webhooks" +) + +// Webhook is a gorm data model for webhooks +type Webhook struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + CompanyID *uuid.UUID `gorm:"uniqueIndex:idx_webhooks_unique_name_and_company_id;type:uuid"` + Name string `gorm:"not null;uniqueIndex:idx_webhooks_unique_name_and_company_id;"` + URL string `gorm:"not null;"` + Secret string +} + +func (e *Webhook) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "webhooks") +} + +func (Webhook) TableName() string { + return WEBHOOK_TABLE +} diff --git a/backend/embedded/files.go b/backend/embedded/files.go new file mode 100644 index 0000000..5614e20 --- /dev/null +++ b/backend/embedded/files.go @@ -0,0 +1,18 @@ +package embedded + +import ( + _ "embed" +) + +//go:embed tracking-pixel/sendgrid/open.gif +var TrackingPixel []byte + +// SigningKey1 is verifing the signed .sig file when updating +// +//go:embed signingkeys/public1.bin +var SigningKey1 []byte + +// SigningKey2 is a extra verification key if key 1 is lost +// +//go:embed signingkeys/public2.bin +var SigningKey2 []byte diff --git a/backend/embedded/signingkeys/public1.bin b/backend/embedded/signingkeys/public1.bin new file mode 100644 index 0000000..7842f14 --- /dev/null +++ b/backend/embedded/signingkeys/public1.bin @@ -0,0 +1 @@ +0XK$DH (Ul̀Y}N \ No newline at end of file diff --git a/backend/embedded/signingkeys/public2.bin b/backend/embedded/signingkeys/public2.bin new file mode 100644 index 0000000..da0f670 --- /dev/null +++ b/backend/embedded/signingkeys/public2.bin @@ -0,0 +1 @@ +.B)쒤K~X$Rz^g diff --git a/backend/embedded/tracking-pixel/sendgrid/open.gif b/backend/embedded/tracking-pixel/sendgrid/open.gif new file mode 100644 index 0000000..35d42e8 Binary files /dev/null and b/backend/embedded/tracking-pixel/sendgrid/open.gif differ diff --git a/backend/errs/all.go b/backend/errs/all.go new file mode 100644 index 0000000..7cfefd8 --- /dev/null +++ b/backend/errs/all.go @@ -0,0 +1,114 @@ +package errs + +import ( + goerrors "errors" + + 
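	// goerrors (stdlib "errors") provides the sentinel values below, while
	// github.com/go-errors/errors adds stack traces via Wrap. Typical
	// call-site sketch (illustrative; doThing is a hypothetical function):
	//
	//	if err := doThing(); err != nil {
	//		return errs.Wrap(err) // attaches a stack trace, but only once
	//	}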
"github.com/go-errors/errors" +) + +// errors and messages +var ( + // db + ErrDBSeedFailure = goerrors.New("failed to seed db") + + // install + ErrAlreadyInstalled = goerrors.New("already installed") + + // auth and permissions + ErrAuthenticationFailed = goerrors.New("authentication failed") + ErrAuthorizationFailed = goerrors.New("authorization error") + + // mapping + ErrMappingDBToEntityFailed = goerrors.New("failed to map db to entity") + + // audit + ErrAuditFailedToSave = goerrors.New("failed to save audit") + + // user + ErrUserWrongPasword = goerrors.New("wrong password") + ErrUserWrongTOTP = goerrors.New("incorrect code") + ErrUserWrongRecoveryCode = goerrors.New("incorrect recovery code") + + // session + ErrSessionCookieNotFound = goerrors.New("session cookie not found") + + // campaign + ErrCampaignAlreadySetToClose = goerrors.New("campaign already set to closed") + ErrCampaignAlreadyClosed = goerrors.New("campaign already closed") + ErrCampaignAlreadyAnonymized = goerrors.New("campaign already anonymized") + + // validation err + ErrValidationFailed = goerrors.New("validation error") + + // license + ErrLicenseMismatchSignature = goerrors.New("signature does not match") + ErrLicenseExpired = goerrors.New("expired") + ErrLicenseEditionMismatch = goerrors.New("edition does not match subscription") + ErrLicenseNotValid = goerrors.New("license is not valid") + ErrLicenseRequestFailed = goerrors.New("license request failed") + ErrLicenseInvalidKey = goerrors.New("invalid license key") + + // update + ErrNoUpdateAvailable = goerrors.New("no update available") + + // sso + ErrSSODisabled = goerrors.New("SSO disabled") +) + +// format messages +const ( + MsgPasswordRenewRequired = "New password required" + MsgFailedToParseRequest = "failed to parse request" + MsgFailedToParseUUID = "failed to parse uuid" + MsgfFailedToParseCompanyUUID = "failed to parse company uuid: %s" + MsgfFailedToMakeName = "failed to make name: %s" + MsgfFailedToParseTypeID = "failed to parse message type uuid: %s" + MsgfInvalidOffsetOrLimit = "invalid offset or limit: %s" +) + +func Wrap(err error) error { + if err == nil { + return nil + } + // we only wrap an error once + if _, ok := err.(*errors.Error); ok { + return err + } + return errors.Wrap(err, 0) +} + +// ValidationError is a validation error +type ValidationError struct { + Err error +} + +// NewValidationError creates a new validation error +func NewValidationError(err error) error { + return ValidationError{ + Err: err, + } +} + +// Error returns the validation error +func (e ValidationError) Error() string { + return e.Err.Error() +} + +// CustomError is a custom error +type CustomError struct { + Err error +} + +// NewCustomError creates a new custom error +// it is used when a custom error message should be +// returned to the consumer +func NewCustomError(err error) error { + return CustomError{ + Err: err, + } +} + +// Error returns the custom error +func (e CustomError) Error() string { + return e.Err.Error() +} diff --git a/backend/file/file.go b/backend/file/file.go new file mode 100644 index 0000000..659c677 --- /dev/null +++ b/backend/file/file.go @@ -0,0 +1,36 @@ +package file + +import ( + "fmt" + "io/fs" + "os" +) + +// Write writes data to a file +type Writer interface { + Write(filepath string, data []byte, flag int, perm fs.FileMode) (int, error) +} + +// Write writes data to a file +// returns bytes written or error +func Write(filepath string, data []byte, flag int, perm fs.FileMode) (int, error) { + // #nosec + file, err := 
os.OpenFile(filepath, flag, perm) + if err != nil { + return 0, fmt.Errorf("failed to create file: %w", err) + } + defer file.Close() + b, err := file.Write(data) + if err != nil { + return b, fmt.Errorf("failed to write to file: %w", err) + } + return b, nil +} + +// FileWriter is a file writer +type FileWriter struct{} + +// Write writes data to a file +func (w FileWriter) Write(filepath string, data []byte, flag int, perm fs.FileMode) (int, error) { + return Write(filepath, data, flag, perm) +} diff --git a/backend/file/filemock/file.go b/backend/file/filemock/file.go new file mode 100644 index 0000000..87243aa --- /dev/null +++ b/backend/file/filemock/file.go @@ -0,0 +1,16 @@ +package filemock + +import ( + "io/fs" + + "github.com/stretchr/testify/mock" +) + +type Writer struct { + mock.Mock +} + +func (m *Writer) Write(filepath string, data []byte, flag int, perm fs.FileMode) (int, error) { + args := m.Called(filepath, data, flag, perm) + return args.Int(0), args.Error(1) +} diff --git a/backend/frontend/embed.go b/backend/frontend/embed.go new file mode 100644 index 0000000..168a28a --- /dev/null +++ b/backend/frontend/embed.go @@ -0,0 +1,54 @@ +package frontend + +import ( + "embed" + "html/template" + "io/fs" + "regexp" + "strings" + + "github.com/gin-gonic/gin" +) + +// The version of gin I used when writting this, did not support using embeded files as html +// so I found this solution on good old https://stackoverflow.com/questions/26537299/golang-gin-framework-status-code-without-message-body + +// LoadHTMLFromEmbedFS loads all files from the embeded file system that match the pattern +func LoadHTMLFromEmbedFS(engine *gin.Engine, embedFS embed.FS, pattern string) { + root := template.New("") + tmpl := template.Must(root, LoadAndAddToRoot(engine.FuncMap, root, embedFS, pattern)) + engine.SetHTMLTemplate(tmpl) +} + +// LoadAndAddToRoot loads all files from the embeded file system that match the pattern and adds them to the root template +// +// Usage: +// +// func (engine *gin.Engine) LoadHTMLFromFS(embedFS embed.FS, pattern string) { +// root := template.New("") +// tmpl := template.Must(root, LoadAndAddToRoot(engine.FuncMap, root, embedFS, pattern)) +// engine.SetHTMLTemplate(tmpl) +// } +func LoadAndAddToRoot(funcMap template.FuncMap, rootTemplate *template.Template, embedFS embed.FS, pattern string) error { + pattern = strings.ReplaceAll(pattern, ".", "\\.") + pattern = strings.ReplaceAll(pattern, "*", ".*") + + err := fs.WalkDir(embedFS, ".", func(path string, d fs.DirEntry, walkErr error) error { + if walkErr != nil { + return walkErr + } + + if matched, _ := regexp.MatchString(pattern, path); !d.IsDir() && matched { + data, readErr := embedFS.ReadFile(path) + if readErr != nil { + return readErr + } + t := rootTemplate.New(path).Funcs(funcMap) + if _, parseErr := t.Parse(string(data)); parseErr != nil { + return parseErr + } + } + return nil + }) + return err +} diff --git a/backend/frontend/frontend.go b/backend/frontend/frontend.go new file mode 100644 index 0000000..79f480c --- /dev/null +++ b/backend/frontend/frontend.go @@ -0,0 +1,13 @@ +//go:build !dev + +package frontend + +import "embed" + +//go:embed build/* +var content embed.FS + +// GetEmbededFS returns the embeded file system that contains the frontend +func GetEmbededFS() *embed.FS { + return &content +} diff --git a/backend/frontend/frontend_dev.go b/backend/frontend/frontend_dev.go new file mode 100644 index 0000000..0163361 --- /dev/null +++ b/backend/frontend/frontend_dev.go @@ -0,0 +1,14 @@ +//go:build 
dev + +package frontend + +import "embed" + +// In dev mode no files are embeded +// all files are served from the filesystem at runtime +var content embed.FS + +// GetEmbededFS returns the embeded file system that contains the frontend +func GetEmbededFS() *embed.FS { + return &content +} diff --git a/backend/go.mod b/backend/go.mod new file mode 100644 index 0000000..c88296f --- /dev/null +++ b/backend/go.mod @@ -0,0 +1,90 @@ +module github.com/phishingclub/phishingclub + +go 1.23.6 + +require ( + github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 + github.com/boombuler/barcode v1.0.1 + github.com/brianvoe/gofakeit/v7 v7.0.4 + github.com/caddyserver/certmagic v0.19.2 + github.com/charmbracelet/bubbles v0.20.0 + github.com/charmbracelet/bubbletea v1.3.4 + github.com/charmbracelet/lipgloss v1.1.0 + github.com/cyphar/filepath-securejoin v0.3.4 + github.com/fatih/color v1.15.0 + github.com/gin-contrib/zap v1.1.4 + github.com/gin-gonic/gin v1.10.0 + github.com/go-errors/errors v1.5.1 + github.com/google/uuid v1.3.0 + github.com/oapi-codegen/nullable v1.1.0 + github.com/pquerna/otp v1.4.0 + github.com/stretchr/testify v1.9.0 + github.com/wneessen/go-mail v0.4.2-0.20240324213705-f60ef348aa29 + github.com/yeqown/go-qrcode/v2 v2.2.4 + go.uber.org/zap v1.27.0 + golang.org/x/crypto v0.37.0 + golang.org/x/mod v0.24.0 + golang.org/x/net v0.34.0 + golang.org/x/time v0.3.0 + gopkg.in/yaml.v3 v3.0.1 + gorm.io/driver/sqlite v1.6.0 + gorm.io/gorm v1.30.1 +) + +require ( + github.com/atotto/clipboard v0.1.4 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/bytedance/sonic v1.12.1 // indirect + github.com/bytedance/sonic/loader v0.2.0 // indirect + github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect + github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/cloudwego/base64x v0.1.4 // indirect + github.com/cloudwego/iasm v0.2.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/gabriel-vasile/mimetype v1.4.5 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.22.0 // indirect + github.com/goccy/go-json v0.10.3 // indirect + github.com/golang-jwt/jwt/v5 v5.0.0 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/cpuid/v2 v2.2.8 // indirect + github.com/kylelemons/godebug v1.1.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/libdns/libdns v0.2.1 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/mattn/go-sqlite3 v1.14.22 // indirect + github.com/mholt/acmez v1.2.0 // indirect + github.com/miekg/dns v1.1.55 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + 
github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/pelletier/go-toml/v2 v2.2.2 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.12 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + github.com/yeqown/reedsolomon v1.0.0 // indirect + github.com/zeebo/blake3 v0.2.3 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/arch v0.9.0 // indirect + golang.org/x/sync v0.13.0 // indirect + golang.org/x/sys v0.32.0 // indirect + golang.org/x/text v0.24.0 // indirect + golang.org/x/tools v0.29.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect +) diff --git a/backend/go.sum b/backend/go.sum new file mode 100644 index 0000000..544e13f --- /dev/null +++ b/backend/go.sum @@ -0,0 +1,201 @@ +github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ= +github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4= +github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/boombuler/barcode v1.0.1 h1:NDBbPmhS+EqABEs5Kg3n/5ZNjy73Pz7SIV+KCeqyXcs= +github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/brianvoe/gofakeit/v7 v7.0.4 h1:Mkxwz9jYg8Ad8NvT9HA27pCMZGFQo08MK6jD0QTKEww= +github.com/brianvoe/gofakeit/v7 v7.0.4/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= +github.com/bytedance/sonic v1.12.1 h1:jWl5Qz1fy7X1ioY74WqO0KjAMtAGQs4sYnjiEBiyX24= +github.com/bytedance/sonic v1.12.1/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= +github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM= +github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/caddyserver/certmagic v0.19.2 h1:HZd1AKLx4592MalEGQS39DKs2ZOAJCEM/xYPMQ2/ui0= +github.com/caddyserver/certmagic v0.19.2/go.mod h1:fsL01NomQ6N+kE2j37ZCnig2MFosG+MIO4ztnmG/zz8= +github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE= +github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU= +github.com/charmbracelet/bubbletea v1.3.4 h1:kCg7B+jSCFPLYRA52SDZjr51kG/fMUEoPoZrkaDHyoI= +github.com/charmbracelet/bubbletea v1.3.4/go.mod h1:dtcUCyCGEX3g9tosuYiut3MXgY/Jsv9nKVdibKKRRXo= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= 
+github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= +github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= +github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= +github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= +github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= +github.com/cyphar/filepath-securejoin v0.3.4 h1:VBWugsJh2ZxJmLFSM06/0qzQyiQX2Qs0ViKrUAcqdZ8= +github.com/cyphar/filepath-securejoin v0.3.4/go.mod h1:8s/MCNJREmFK0H02MF6Ihv1nakJe4L/w3WZLHNkvlYM= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4= +github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-contrib/zap v1.1.4 h1:xvxTybg6XBdNtcQLH3Tf0lFr4vhDkwzgLLrIGlNTqIo= +github.com/gin-contrib/zap v1.1.4/go.mod h1:7lgEpe91kLbeJkwBTPgtVBy4zMa6oSBEcvj662diqKQ= +github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= +github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= +github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao= +github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/goccy/go-json v0.10.3 
h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= +github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= +github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= +github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/libdns/libdns v0.2.1 h1:Wu59T7wSHRgtA0cfxC+n1c/e+O3upJGWytknkmFEDis= +github.com/libdns/libdns v0.2.1/go.mod h1:yQCXzk1lEZmmCPa857bnk4TsOiqYasqpyOEeSObbb40= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mholt/acmez v1.2.0 h1:1hhLxSgY5FvH5HCnGUuwbKY2VQVo8IU7rxXKSnZ7F30= +github.com/mholt/acmez v1.2.0/go.mod 
h1:VT9YwH1xgNX1kmYY89gY8xPJC84BFAisjo8Egigt4kE= +github.com/miekg/dns v1.1.55 h1:GoQ4hpsj0nFLYe+bWiCToyrBEJXkQfOOIvFGFy0lEgo= +github.com/miekg/dns v1.1.55/go.mod h1:uInx36IzPl7FYnDcMeVWxj9byh7DutNykX4G9Sj60FY= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/oapi-codegen/nullable v1.1.0 h1:eAh8JVc5430VtYVnq00Hrbpag9PFRGWLjxR1/3KntMs= +github.com/oapi-codegen/nullable v1.1.0/go.mod h1:KUZ3vUzkmEKY90ksAmit2+5juDIhIZhfDl+0PwOQlFY= +github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= +github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pquerna/otp v1.4.0 h1:wZvl1TIVxKRThZIBiwOOHOGP/1+nZyWBil9Y2XNEDzg= +github.com/pquerna/otp v1.4.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= 
+github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/wneessen/go-mail v0.4.2-0.20240324213705-f60ef348aa29 h1:qIrOwcLsH4ZabL4lM3wXKy+/61x2c8ytC4DBLoQd7Og= +github.com/wneessen/go-mail v0.4.2-0.20240324213705-f60ef348aa29/go.mod h1:zxOlafWCP/r6FEhAaRgH4IC1vg2YXxO0Nar9u0IScZ8= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/yeqown/go-qrcode/v2 v2.2.4 h1:cXdYlrhzHzVAnJHiwr/T6lAUmS9MtEStjEZBjArrvnc= +github.com/yeqown/go-qrcode/v2 v2.2.4/go.mod h1:uHpt9CM0V1HeXLz+Wg5MN50/sI/fQhfkZlOM+cOTHxw= +github.com/yeqown/reedsolomon v1.0.0 h1:x1h/Ej/uJnNu8jaX7GLHBWmZKCAWjEJTetkqaabr4B0= +github.com/yeqown/reedsolomon v1.0.0/go.mod h1:P76zpcn2TCuL0ul1Fso373qHRc69LKwAw/Iy6g1WiiM= +github.com/zeebo/assert v1.1.0 h1:hU1L1vLTHsnO8x8c9KAR5GmM5QscxHg5RNU5z5qbUWY= +github.com/zeebo/assert v1.1.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= +github.com/zeebo/blake3 v0.2.3 h1:TFoLXsjeXqRNFxSbk35Dk4YtszE/MQQGK10BH4ptoTg= +github.com/zeebo/blake3 v0.2.3/go.mod h1:mjJjZpnsyIVtVgTOSpJ9vmRE4wgDeyt2HU3qXvvKCaQ= +github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo= +github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +golang.org/x/arch v0.9.0 h1:ub9TgUInamJ8mrZIGlBG6/4TqWeMszd4N8lNorbrr6k= +golang.org/x/arch v0.9.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= +golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561 h1:MDc5xs78ZrZr3HMQugiXOAkSZtfTpbJLDr/lwfgO53E= +golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= +golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod 
h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= +golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE= +golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= +gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= +gorm.io/gorm v1.30.1 h1:lSHg33jJTBxs2mgJRfRZeLDG+WZaHYCk3Wtfl6Ngzo4= +gorm.io/gorm v1.30.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= +nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= diff --git a/backend/install/installer.go b/backend/install/installer.go new file mode 100644 index 0000000..1932b61 --- /dev/null +++ b/backend/install/installer.go @@ -0,0 +1,502 @@ +package install + +import ( + "bufio" + "bytes" + "embed" + "fmt" + "html/template" + "os" + "os/exec" + "path/filepath" + "strings" + "time" +) + +//go:embed systemd.service +var installFiles embed.FS + +const ( + // Installation paths + installDir = "/opt/phishingclub" + binaryName = "phishingclub" + dataDir = "data" + + // User and group + serviceUser = "phishingclub" + serviceGroup = "phishingclub" + + // Service + serviceName = "phishingclub" +) + +// Install handles the complete installation process interactively +func Install() error { + return RunInteractiveInstall() +} + +// InstallNonInteractive handles the non-interactive installation process +func InstallNonInteractive() error { + if os.Geteuid() != 0 { + return fmt.Errorf("installation must be run as root") + } + + steps := []struct { + name string + fn func() error + }{ + {"check sqlite dependency", checkSQLiteDependency}, + {"create user and group", createUserAndGroup}, + {"create directories", createDirectories}, + {"install binary", installBinary}, + {"install systemd service", installSystemdService}, + {"set permissions", setPermissions}, + {"enable service", enableService}, + {"start service", startService}, + {"print info", outputCredentialsAndInfo}, + } + + for _, step := range steps { + fmt.Printf("Step: %s\n", step.name) + if err := step.fn(); err != nil { + return fmt.Errorf("%s: %w", step.name, err) + } + } + + fmt.Println() + fmt.Println("Installer completed successfully! 🐟") + fmt.Println() + fmt.Println("If the service is restarted before the first used is setup, the password will change! 
- Check the logs") + fmt.Println() + fmt.Println("# Tips") + fmt.Println("'journalctl -u phishingclub.service -f' to see logs") + fmt.Println("'systemctl status phishingclub' to check status of the service") + fmt.Println("") + fmt.Println() + + return nil +} + +// Update handles the update process of the application +func Update() error { + if os.Geteuid() != 0 { + return fmt.Errorf("update must be run as root") + } + + // Check if service exists + if err := checkServiceExists(); err != nil { + return fmt.Errorf("service check failed: %w", err) + } + + steps := []struct { + name string + fn func() error + }{ + {"stop service", stopService}, + {"backup current binary", backupCurrentBinary}, + {"update binary", updateBinary}, + {"start service", startService}, + } + + for _, step := range steps { + fmt.Printf("Step: %s\n", step.name) + if err := step.fn(); err != nil { + return fmt.Errorf("%s: %w", step.name, err) + } + } + + fmt.Println() + + fmt.Println("# Post-update Status Check") + // Give the service a moment to stabilize + time.Sleep(2 * time.Second) + + cmd := exec.Command("systemctl", "status", serviceName) + output, err := cmd.CombinedOutput() + if err != nil { + fmt.Println("⚠️ Warning: Service may not be running properly after update") + fmt.Printf("Service status output:\n%s\n", string(output)) + fmt.Printf("Check status with: systemctl status %s\n", serviceName) + return nil + } + + if !strings.Contains(string(output), "active (running)") { + fmt.Println("⚠️ Warning: Service is not in 'active' state after update") + fmt.Printf("Service status output:\n%s\n", string(output)) + return nil + } + + fmt.Println("✅ Service is running") + fmt.Println() + fmt.Println("Update completed! 🐟") + fmt.Println() + + return nil +} + +// checkServiceExists verifies that the service is installed +func checkServiceExists() error { + servicePath := filepath.Join("/etc/systemd/system", serviceName+".service") + if _, err := os.Stat(servicePath); os.IsNotExist(err) { + return fmt.Errorf("service is not installed. 
Please run --install first") + } + return nil +} + +// stopService stops the running service +func stopService() error { + cmd := exec.Command("systemctl", "stop", serviceName) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to stop service: %s, error: %w", string(output), err) + } + + // Wait a moment to ensure the service is fully stopped + time.Sleep(2 * time.Second) + return nil +} + +// backupCurrentBinary creates a backup of the current binary +func backupCurrentBinary() error { + currentBinary := filepath.Join(installDir, binaryName) + backupBinary := filepath.Join(installDir, binaryName+".backup") + // #nosec + input, err := os.ReadFile(currentBinary) + if err != nil { + return fmt.Errorf("failed to read current binary: %w", err) + } + + if err := os.WriteFile(backupBinary, input, 0600); err != nil { + return fmt.Errorf("failed to write backup binary: %w", err) + } + + return nil +} + +// updateBinary updates the binary with the new version +func updateBinary() error { + executable, err := os.Executable() + if err != nil { + return fmt.Errorf("failed to get executable path: %w", err) + } + // #nosec + input, err := os.ReadFile(executable) + if err != nil { + return fmt.Errorf("failed to read new binary: %w", err) + } + + binaryPath := filepath.Join(installDir, binaryName) + if err := os.WriteFile(binaryPath, input, 0600); err != nil { + return fmt.Errorf("failed to write new binary: %w", err) + } + + // Set proper ownership + if err := setPermissions(); err != nil { + return fmt.Errorf("failed to set permissions: %w", err) + } + + return nil +} + +func checkSQLiteDependency() error { + // Check if sqlite3 is installed + if _, err := exec.LookPath("sqlite3"); err != nil { + fmt.Println("SQLite3 is not installed. 
Attempting to install...") + + // Detect package manager and install sqlite + if err := installSQLite(); err != nil { + return fmt.Errorf("failed to install sqlite: %w", err) + } + } + + // Verify sqlite installation + cmd := exec.Command("sqlite3", "--version") + output, err := cmd.CombinedOutput() + if err != nil { + return fmt.Errorf("sqlite verification failed: %w", err) + } + + fmt.Printf("SQLite version: %s", output) + return nil +} + +func installSQLite() error { + // Detect the package manager and install sqlite + var cmd *exec.Cmd + + // Check for apt (Debian/Ubuntu) + if _, err := exec.LookPath("apt"); err == nil { + cmd = exec.Command("apt", "update") + err := cmd.Run() // Update package list + if err != nil { + fmt.Printf("ERR: %s\n", err) + } + cmd = exec.Command("apt", "install", "-y", "sqlite3") + } else if _, err := exec.LookPath("yum"); err == nil { + // Check for yum (RHEL/CentOS) + cmd = exec.Command("yum", "install", "-y", "sqlite") + } else if _, err := exec.LookPath("dnf"); err == nil { + // Check for dnf (Fedora) + cmd = exec.Command("dnf", "install", "-y", "sqlite") + } else if _, err := exec.LookPath("pacman"); err == nil { + // Check for pacman (Arch) + cmd = exec.Command("pacman", "-S", "--noconfirm", "sqlite") + } else { + return fmt.Errorf("no supported package manager found (apt, yum, dnf, or pacman)") + } + + output, err := cmd.CombinedOutput() + if err != nil { + return fmt.Errorf("failed to install sqlite: %s, error: %w", string(output), err) + } + + return nil +} + +// enableService enables the systemd service so it gets started on boot +func enableService() error { + if err := exec.Command("systemctl", "enable", serviceName).Run(); err != nil { + return fmt.Errorf("failed to enable service: %w", err) + } + return nil +} + +// startService starts the systemd service +func startService() error { + if err := exec.Command("systemctl", "start", serviceName).Run(); err != nil { + return fmt.Errorf("failed to start service: %w", err) + } + return nil +} + +func Uninstall() error { + if os.Geteuid() != 0 { + return fmt.Errorf("uninstallation must be run as root") + } + + // Display warning and confirmation prompt + fmt.Println("⚠️ WARNING: Uninstallation will remove ALL components of Phishing Club, including:") + fmt.Println(" • The application binary and its service") + fmt.Println(" • ALL configuration files") + fmt.Println(" • ALL data, including the database") + fmt.Println(" • The phishingclub user and group") + fmt.Println("\nThis operation CANNOT be undone!") + + reader := bufio.NewReader(os.Stdin) + fmt.Print("\nType 'YES' (all caps) to confirm uninstallation: ") + confirmation, _ := reader.ReadString('\n') + confirmation = strings.TrimSpace(confirmation) + + if confirmation != "YES" { + fmt.Println("Uninstallation cancelled.") + return fmt.Errorf("uninstallation cancelled by user") + } + + return performUninstall() +} + +// UninstallNonInteractive performs uninstallation without confirmation prompts +func UninstallNonInteractive() error { + if os.Geteuid() != 0 { + return fmt.Errorf("uninstallation must be run as root") + } + + return performUninstall() +} + +// performUninstall handles the actual uninstallation process +func performUninstall() error { + fmt.Println("Uninstalling Phishing Club...") + + // Stop and disable service + err := exec.Command("systemctl", "stop", serviceName).Run() + if err != nil { + fmt.Printf("Warning: Failed to stop service: %v\n", err) + // Continue with uninstallation + } + + err = exec.Command("systemctl", "disable", 
serviceName).Run() + if err != nil { + fmt.Printf("Warning: Failed to disable service: %v\n", err) + // Continue with uninstallation + } + + // Remove service file + servicePath := filepath.Join("/etc/systemd/system", serviceName+".service") + err = os.Remove(servicePath) + if err != nil && !os.IsNotExist(err) { + fmt.Printf("Warning: Failed to remove service unit file: %v\n", err) + // Continue with uninstallation + } + + // Reload systemd + err = exec.Command("systemctl", "daemon-reload").Run() + if err != nil { + fmt.Printf("Warning: Failed to reload systemctl daemon: %v\n", err) + // Continue with uninstallation + } + + // Remove installation directory + err = os.RemoveAll(installDir) + if err != nil && !os.IsNotExist(err) { + return fmt.Errorf("failed to remove install directory: %w", err) + } + + // Remove user and group + fmt.Printf("Removing user and group: %s\n", serviceUser) + err = exec.Command("userdel", serviceUser).Run() + if err != nil { + fmt.Printf("Warning: Failed to delete user %s: %v\n", serviceUser, err) + // Continue with uninstallation + } + + _ = exec.Command("groupdel", serviceGroup).Run() + // Group deletion errors are not critical + + fmt.Println("\n✅ Uninstallation completed successfully!") + fmt.Println("All Phishing Club components have been removed from your system.") + + return nil +} + +func createUserAndGroup() error { + // Check if group exists + if err := exec.Command("getent", "group", serviceGroup).Run(); err != nil { + cmd := exec.Command("groupadd", serviceGroup) + if err := cmd.Run(); err != nil { + return fmt.Errorf("failed to create group: %w", err) + } + } + + // Check if user exists + if err := exec.Command("getent", "passwd", serviceUser).Run(); err != nil { + cmd := exec.Command("useradd", + "-r", + "-g", serviceGroup, + "-s", "/bin/false", + serviceUser, + ) + if err := cmd.Run(); err != nil { + return fmt.Errorf("failed to create user: %w", err) + } + } + + return nil +} + +func createDirectories() error { + dirs := []string{ + installDir, + filepath.Join(installDir, dataDir), + } + + for _, dir := range dirs { + if err := os.MkdirAll(dir, 0750); err != nil { + return fmt.Errorf("failed to create directory %s: %w", dir, err) + } + } + + return nil +} + +func installBinary() error { + executable, err := os.Executable() + if err != nil { + return fmt.Errorf("failed to get executable path: %w", err) + } + // #nosec + input, err := os.ReadFile(executable) + if err != nil { + return fmt.Errorf("failed to read executable: %w", err) + } + + binaryPath := filepath.Join(installDir, binaryName) + // #nosec + if err := os.WriteFile(binaryPath, input, 0750); err != nil { + return fmt.Errorf("failed to write binary: %w", err) + } + + return nil +} + +func installSystemdService() error { + serviceTemplate, err := installFiles.ReadFile("systemd.service") + if err != nil { + return fmt.Errorf("failed to read service template: %w", err) + } + + // Create template data with all required fields + data := struct { + User string + Group string + InstallDir string + BinaryPath string + ConfigPath string + DataDir string + }{ + User: serviceUser, + Group: serviceGroup, + InstallDir: installDir, + BinaryPath: filepath.Join(installDir, binaryName), + ConfigPath: filepath.Join(installDir, "config.json"), + DataDir: filepath.Join(installDir, dataDir), + } + + // Parse and execute the template + tmpl, err := template.New("service").Parse(string(serviceTemplate)) + if err != nil { + return fmt.Errorf("failed to parse service template: %w", err) + } + + var buf 
bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + return fmt.Errorf("failed to execute service template: %w", err) + } + + servicePath := filepath.Join("/etc/systemd/system", serviceName+".service") + // #nosec + if err := os.WriteFile(servicePath, buf.Bytes(), 0644); err != nil { + return fmt.Errorf("failed to write service file: %w", err) + } + + if err := exec.Command("systemctl", "daemon-reload").Run(); err != nil { + return fmt.Errorf("failed to reload systemd: %w", err) + } + + return nil +} + +func setPermissions() error { + // #nosec + cmd := exec.Command("chown", "-R", + fmt.Sprintf("%s:%s", serviceUser, serviceGroup), + installDir, + ) + if err := cmd.Run(); err != nil { + return fmt.Errorf("failed to set ownership: %w", err) + } + + return nil +} + +func outputCredentialsAndInfo() error { + time.Sleep(3 * time.Second) + fmt.Println() + fmt.Println("<<< IMPORTANT >>>") + + steps := []string{ + "journalctl -u phishingclub.service -r -n 5000 --no-pager --output=cat | grep 'Username:' -m1", + "journalctl -u phishingclub.service -r -n 5000 --no-pager --output=cat | grep 'Password:' -m1", + "journalctl -u phishingclub.service -r -n 5000 --no-pager --output=cat | grep 'Phishing HTTPS' -m1 -B1 | tac", + "journalctl -u phishingclub.service -r -n 5000 --no-pager --output=cat | grep 'Phishing HTTP server' -m1 -B1 | tac", + "journalctl -u phishingclub.service -r -n 5000 --no-pager --output=cat | grep 'Admin server' -m1 -B1 | tac", + } + for _, t := range steps { + cmd := exec.Command("sh", "-c", t) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + return fmt.Errorf("failed to get all install information: %w", err) + } + } + return nil +} diff --git a/backend/install/interactive.go b/backend/install/interactive.go new file mode 100644 index 0000000..d52ffd0 --- /dev/null +++ b/backend/install/interactive.go @@ -0,0 +1,396 @@ +package install + +import ( + "fmt" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/phishingclub/phishingclub/config" + + "github.com/charmbracelet/bubbles/textinput" + tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" +) + +var ( + // Attractive UI styles with a cohesive color scheme + titleStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FFFFFF")). + Background(lipgloss.Color("#0B3D91")). // NASA blue + Bold(true). + Padding(2, 2) + + inputStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#0B3D91")) + + focusedStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FFFFFF")). + Background(lipgloss.Color("#1E88E5")). // Material blue + Bold(true) + + blurredStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#0B3D91")) + + cursorStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FF9E43")) // Amber accent + + helpStyle = lipgloss.NewStyle(). + Italic(true). + Foreground(lipgloss.Color("#607D8B")) // Blue grey + + errorStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#F44336")). // Red + Bold(true) + + buttonStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FFFFFF")). + Background(lipgloss.Color("#43A047")). // Green + Bold(true). 
+ Padding(0, 3) +) + +// InputWithHelp extends textinput.Model to include a help text +type InputWithHelp struct { + textinput.Model + HelpText string +} + +// ConfigModel is the model for the tea app +type ConfigModel struct { + inputs []InputWithHelp + focusIndex int + err error + shouldInstall bool + config *config.Config +} + +// Init initializes the model +func (m ConfigModel) Init() tea.Cmd { + return textinput.Blink +} + +// Update handles updates +func (m ConfigModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.KeyMsg: + switch msg.String() { + case "ctrl+c", "esc": + return m, tea.Quit + + // Navigate between inputs with tab/shift+tab + case "tab", "shift+tab", "enter", "up", "down": + s := msg.String() + + // Did the user press enter while the submit button was focused? + if s == "enter" && m.focusIndex == len(m.inputs) { + // Validate config and set shouldInstall + var err error + m.shouldInstall = true + + // Apply the input values to configuration + err = m.applyConfig() + if err != nil { + m.err = err + m.shouldInstall = false + return m, nil + } + + return m, tea.Quit + } + + // Cycle indexes + if s == "up" || s == "shift+tab" { + m.focusIndex-- + } else { + m.focusIndex++ + } + + if m.focusIndex > len(m.inputs) { + m.focusIndex = 0 + } else if m.focusIndex < 0 { + m.focusIndex = len(m.inputs) + } + + cmds := make([]tea.Cmd, len(m.inputs)) + for i := 0; i < len(m.inputs); i++ { + if i == m.focusIndex { + // Set focused state + cmds[i] = m.inputs[i].Focus() + m.inputs[i].PromptStyle = focusedStyle + m.inputs[i].TextStyle = focusedStyle + } else { + // Remove focused state + m.inputs[i].Blur() + m.inputs[i].PromptStyle = blurredStyle + m.inputs[i].TextStyle = blurredStyle + } + } + + return m, tea.Batch(cmds...) + } + } + + // Handle character input + cmd := m.updateInputs(msg) + + return m, cmd +} + +func (m *ConfigModel) updateInputs(msg tea.Msg) tea.Cmd { + var cmds = make([]tea.Cmd, len(m.inputs)) + + // Only text inputs with Focus() set will respond + for i := range m.inputs { + m.inputs[i].Model, cmds[i] = m.inputs[i].Model.Update(msg) + } + + return tea.Batch(cmds...) 
+} + +func (m ConfigModel) View() string { + var b strings.Builder + + b.WriteString("\n") + b.WriteString(titleStyle.Render(" 🐟 Phishing Club Interactive Installer ")) + b.WriteString("\n\n") + + if m.err != nil { + b.WriteString(errorStyle.Render(fmt.Sprintf("Error: %s\n\n", m.err.Error()))) + } + + for i, input := range m.inputs { + b.WriteString(input.View()) + // Display help text for the focused input + if i == m.focusIndex { + b.WriteString("\n " + helpStyle.Render(input.HelpText)) + } + b.WriteString("\n") + } + + button := blurredStyle.Copy() + if m.focusIndex == len(m.inputs) { + button = buttonStyle + } + fmt.Fprintf(&b, "\n%s\n", button.Render(" Install ")) + + return b.String() +} + +// InitialModel creates the initial model for the tea app +func InitialModel(currentConfig *config.Config) ConfigModel { + // Setup text inputs + var inputs []InputWithHelp + var prompts = []struct { + prompt string + defaultValue string + placeholder string + description string + }{ + {"HTTP port", strconv.Itoa(config.DefaultProductionHTTPPhishingPort), "80", "Port for HTTP phishing server"}, + {"HTTPS port", strconv.Itoa(config.DefaultProductionHTTPSPhishingPort), "443", "Port for HTTPS phishing server"}, + {"Admin port", strconv.Itoa(config.DefaultProductionAdministrationPort), "0 (random port)", "Admin server port - can not be the same as the ports used by the phishing server"}, + {"Admin host", config.DefaultAdminHost, "localhost", "Admin server hostname - used for TLS certificate"}, + {"Use Auto TLS", config.DefaultAdminAutoTLSString, "true/false", "Use automated TLS for the admin service"}, + {"ACME email", config.DefaultACMEEmail, config.DefaultACMEEmail, "Email for Let's Encrypt notifications"}, + } + + for i, p := range prompts { + t := textinput.New() + t.Cursor.Style = cursorStyle + t.CharLimit = 64 + + // Configure each input + t.Placeholder = p.placeholder + t.PromptStyle = blurredStyle + t.TextStyle = blurredStyle + + // The first input is focused + if i == 0 { + t.PromptStyle = focusedStyle + t.TextStyle = focusedStyle + t.Focus() + } + + // Set the prompt with the default value displayed + t.Prompt = fmt.Sprintf("%s [%s]: ", p.prompt, p.defaultValue) + + // Create our custom input with help text + input := InputWithHelp{ + Model: t, + HelpText: p.description, + } + + inputs = append(inputs, input) + } + + return ConfigModel{ + inputs: inputs, + config: currentConfig, + } +} + +// applyConfig takes the input values and applies them to the config +func (m *ConfigModel) applyConfig() error { + // Get the input values or use defaults if empty + httpPort := getValueOrDefault(m.inputs[0].Value(), strconv.Itoa(config.DefaultProductionHTTPPhishingPort)) + httpsPort := getValueOrDefault(m.inputs[1].Value(), strconv.Itoa(config.DefaultProductionHTTPSPhishingPort)) + adminPort := getValueOrDefault(m.inputs[2].Value(), strconv.Itoa(config.DefaultProductionAdministrationPort)) + adminHost := getValueOrDefault(m.inputs[3].Value(), config.DefaultAdminHost) + autoTLS := getValueOrDefault(m.inputs[4].Value(), config.DefaultAdminAutoTLSString) + acmeEmail := getValueOrDefault(m.inputs[5].Value(), config.DefaultACMEEmail) + + // Convert ports to integers + httpPortInt, err := strconv.Atoi(httpPort) + if err != nil { + return fmt.Errorf("invalid HTTP port: %w", err) + } + + httpsPortInt, err := strconv.Atoi(httpsPort) + if err != nil { + return fmt.Errorf("invalid HTTPS port: %w", err) + } + + adminPortInt, err := strconv.Atoi(adminPort) + if err != nil { + return fmt.Errorf("invalid admin port: %w", 
err) + } + + // Validate port values + if httpPortInt <= 0 || httpPortInt > 65535 { + return fmt.Errorf("HTTP port must be between 1 and 65535") + } + if httpsPortInt <= 0 || httpsPortInt > 65535 { + return fmt.Errorf("HTTPS port must be between 1 and 65535") + } + if adminPortInt < 0 || adminPortInt > 65535 { + return fmt.Errorf("admin port must be between 0 and 65535") + } + + // Check for port conflicts + if httpPortInt == httpsPortInt { + return fmt.Errorf("HTTP and HTTPS ports cannot be the same") + } + if adminPortInt != 0 && (adminPortInt == httpPortInt || adminPortInt == httpsPortInt) { + return fmt.Errorf("admin port cannot be the same as HTTP or HTTPS ports") + } + + // Convert autoTLS to boolean + autoTLSBool := false + if strings.ToLower(autoTLS) == config.DefaultAdminAutoTLSString { + autoTLSBool = true + } + + // Set values in config + err = m.config.SetPhishingHTTPNetAddress(fmt.Sprintf("0.0.0.0:%d", httpPortInt)) + if err != nil { + return fmt.Errorf("failed to set HTTP address: %w", err) + } + + err = m.config.SetPhishingHTTPSNetAddress(fmt.Sprintf("0.0.0.0:%d", httpsPortInt)) + if err != nil { + return fmt.Errorf("failed to set HTTPS address: %w", err) + } + + err = m.config.SetAdminNetAddress(fmt.Sprintf("0.0.0.0:%d", adminPortInt)) + if err != nil { + return fmt.Errorf("failed to set admin address: %w", err) + } + + m.config.SetTLSHost(adminHost) + m.config.SetTLSAuto(autoTLSBool) + m.config.SetACMEEmail(acmeEmail) + + return nil +} + +// getValueOrDefault returns the value or the default if value is empty +func getValueOrDefault(value, defaultValue string) string { + if value == "" { + return defaultValue + } + return value +} + +func RunInteractiveInstall() error { + // First check if we're running as root + if os.Geteuid() != 0 { + return fmt.Errorf("installation must be run as root") + } + + // Create installation directories first + if err := createDirectories(); err != nil { + return fmt.Errorf("failed to create install directories: %w", err) + } + + // Get default configuration + conf := config.NewProductionDefaultConfig() + + // Run the tea program + p := tea.NewProgram(InitialModel(conf)) + model, err := p.Run() + if err != nil { + return fmt.Errorf("error running interactive installer: %w", err) + } + + // Get the final model + finalModel := model.(ConfigModel) + if !finalModel.shouldInstall { + return fmt.Errorf("installation cancelled") + } + + // Save the config to the installation directory + configPath := filepath.Join(installDir, "config.json") + err = finalModel.config.WriteToFile(configPath) + if err != nil { + return fmt.Errorf("failed to save configuration to %s: %w", configPath, err) + } + if err := os.Chmod(configPath, 0600); err != nil { + return fmt.Errorf("failed to set config file permissions: %w", err) + } + + fmt.Printf("Configuration saved to %s\n", configPath) + + // Now run the actual installation + err = InstallWithConfig(finalModel.config) + if err != nil { + return err + } + return nil +} + +// InstallWithConfig handles the installation using the provided configuration +func InstallWithConfig(conf *config.Config) error { + steps := []struct { + name string + fn func() error + }{ + {"check sqlite dependency", checkSQLiteDependency}, + {"create user and group", createUserAndGroup}, + {"create directories", createDirectories}, + {"install binary", installBinary}, + {"install systemd service", installSystemdService}, + {"set permissions", setPermissions}, + {"enable service", enableService}, + {"start service", startService}, + {"print 
info", outputCredentialsAndInfo}, + } + + for _, step := range steps { + fmt.Printf("Step: %s\n", step.name) + if err := step.fn(); err != nil { + return fmt.Errorf("%s: %w", step.name, err) + } + } + + fmt.Println() + fmt.Println("Installer completed successfully! 🐟") + fmt.Println() + fmt.Println("# Tips") + fmt.Println("'journalctl -u phishingclub.service -f' to see logs") + fmt.Println("'systemctl status phishingclub' to check status of the service") + fmt.Println("") + + return nil +} diff --git a/backend/install/systemd.service b/backend/install/systemd.service new file mode 100644 index 0000000..a708e04 --- /dev/null +++ b/backend/install/systemd.service @@ -0,0 +1,28 @@ +[Unit] +Description=Phishing Club +After=network.target +Wants=network.target + +[Service] +Type=simple +User={{.User}} +Group={{.Group}} +AmbientCapabilities=CAP_NET_BIND_SERVICE +WorkingDirectory={{.InstallDir}} +ExecStart={{.BinaryPath}} --config={{.ConfigPath}} --files={{.DataDir}} --systemd +PrivateTmp=true +NoNewPrivileges=true +ProtectSystem=full +ProtectHome=true +RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX +RestrictNamespaces=true +RestrictRealtime=true +RestrictSUIDSGID=true +MemoryDenyWriteExecute=true +Restart=always +RestartSec=5s +StartLimitBurst=3 # Allow max 3 restart attempts +StartLimitInterval=60s # Within 60 seconds + +[Install] +WantedBy=multi-user.target diff --git a/backend/log/development.go b/backend/log/development.go new file mode 100644 index 0000000..ac9a5ce --- /dev/null +++ b/backend/log/development.go @@ -0,0 +1,42 @@ +package log + +import ( + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +// TODO add a build tag to this + +// log is the global development logger for the application +// it is always in debug mode and should only be used for +// poor mans debugging and not committed when used +var Log *zap.SugaredLogger + +func init() { + // NewDevelopmentLogger factory for *zap.Logger with development settings + atom := zap.NewAtomicLevelAt(zap.DebugLevel) + c := zap.Config{ + Level: atom, + Development: true, + Encoding: "console", + EncoderConfig: zap.NewDevelopmentEncoderConfig(), + OutputPaths: []string{"stderr"}, + ErrorOutputPaths: []string{"stderr"}, + } + c.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + logger, _ := c.Build() + Log = logger.Sugar() +} + +// Debugf logs a message at debug level +func Bug(args ...any) { + // for each arg do a printf with %v + for _, arg := range args { + Log.Debugf("-->( %++v )", arg) + } +} + +func Stop(args ...any) { + Bug(args...) 
+ panic(0) +} diff --git a/backend/log/factory.go b/backend/log/factory.go new file mode 100644 index 0000000..f9ff549 --- /dev/null +++ b/backend/log/factory.go @@ -0,0 +1,64 @@ +package log + +import ( + "github.com/phishingclub/phishingclub/config" + "github.com/phishingclub/phishingclub/errs" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +// NewDevelopmentLogger factory for *zap.Logger with development settings +func NewIntegrationTestLogger() (*zap.Logger, error) { + c := zap.NewProductionConfig() + c.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + return c.Build() +} + +// NewDevelopmentLogger factory for *zap.Logger with development settings +func NewDevelopmentLogger(conf *config.Config) (*zap.Logger, *zap.AtomicLevel, error) { + atom := zap.NewAtomicLevelAt(zap.DebugLevel) + outPath := []string{"stderr"} + errorOutPath := []string{"stderr"} + if p := conf.LogPath; len(p) > 0 { + outPath = append(outPath, p) + } + if p := conf.ErrLogPath; len(p) > 0 { + errorOutPath = append(errorOutPath, p) + } + c := zap.Config{ + Level: atom, + Development: true, + Encoding: "console", + EncoderConfig: zap.NewDevelopmentEncoderConfig(), + OutputPaths: outPath, + ErrorOutputPaths: errorOutPath, + } + c.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + logger, err := c.Build() + + return logger, &atom, errs.Wrap(err) +} + +// NewProductionLogger factory for *zap.Logger with production settings +func NewProductionLogger(conf *config.Config) (*zap.Logger, *zap.AtomicLevel, error) { + atom := zap.NewAtomicLevelAt(zap.InfoLevel) + + outPath := []string{"stderr"} + errorOutPath := []string{"stderr"} + if p := conf.LogPath; len(p) > 0 { + outPath = append(outPath, p) + } + if p := conf.ErrLogPath; len(p) > 0 { + errorOutPath = append(errorOutPath, p) + } + c := zap.Config{ + Level: atom, + Development: false, + Encoding: "json", + EncoderConfig: zap.NewProductionEncoderConfig(), + OutputPaths: outPath, + ErrorOutputPaths: errorOutPath, + } + logger, err := c.Build() + return logger, &atom, errs.Wrap(err) +} diff --git a/backend/main.go b/backend/main.go new file mode 100644 index 0000000..3d4cae7 --- /dev/null +++ b/backend/main.go @@ -0,0 +1,624 @@ +package main + +import ( + "context" + "flag" + "fmt" + golog "log" + "os" + "os/signal" + "path/filepath" + "strings" + "sync" + "syscall" + "time" + + _ "embed" + + ginzap "github.com/gin-contrib/zap" + "github.com/gin-gonic/gin" + "github.com/go-errors/errors" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/acme" + "github.com/phishingclub/phishingclub/app" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/cli" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/install" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/seed" + "github.com/phishingclub/phishingclub/sso" + "github.com/phishingclub/phishingclub/task" + "github.com/phishingclub/phishingclub/version" + "github.com/phishingclub/phishingclub/vo" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "gorm.io/gorm" + gormLogger "gorm.io/gorm/logger" +) + +const ( + APP_NAME = "Phishing Club" +) + +var ( + flagInstall = flag.Bool("install", false, "Install as a systemd service (interactive)") + flagInstallNonInteractive = flag.Bool("install-non-interactive", false, "Install as a systemd 
service without interactive prompts") + flagUpdate = flag.Bool("update", false, "Update the application binary and restart the service") + flagUninstall = flag.Bool("uninstall", false, "Uninstall the application with confirmation prompt") + flagUninstallNonInteractive = flag.Bool("uninstall-non-interactive", false, "Uninstall the application without confirmation prompt") + flagSystemd = flag.Bool("systemd", false, "Indicates the application is running as a systemd service, this flag is only functional on the initial boot when seeding the database.") + flagVersion = flag.Bool("version", false, "Show version") + flagConfigPath = flag.String("config", "./config.json", "Path to config file") + flagFilePath = flag.String("files", "./data", "Path to save application data") + env = flag.Bool("env", false, "Outputs the available environment variables") + flagRecovery = flag.Bool("recover", false, "Used for interactive recovery of an account") +) + +func main() { + flag.Parse() + + if *env { + cli.OutputEnv() + return + } + + if *flagVersion { + cli.PrintVersion(APP_NAME, version.Get()) + return + } + + if *flagInstall { + if err := install.Install(); err != nil { + golog.Fatalf("Installation failed: %s", err) + } + + return + } + + if *flagInstallNonInteractive { + if err := install.InstallNonInteractive(); err != nil { + golog.Fatalf("Installation failed: %s", err) + } + return + } + + if *flagUninstall { + if err := install.Uninstall(); err != nil { + golog.Fatalf("Uninstallation failed: %s", err) + } + return + } + + if *flagUninstallNonInteractive { + if err := install.UninstallNonInteractive(); err != nil { + golog.Fatalf("Uninstallation failed: %s", err) + } + return + } + + if *flagUpdate { + if err := install.Update(); err != nil { + golog.Fatalf("Update failed: %s", err) + } + return + } + + buildMode := app.MODE_DEVELOPMENT + if build.Flags.Production { + buildMode = app.MODE_PRODUCTION + } + + // check if the files path ends with / else add it + if (*flagFilePath)[len(*flagFilePath)-1:] != "/" { + *flagFilePath = *flagFilePath + "/" + } + acmeCertPath := fmt.Sprintf("%scerts", *flagFilePath) + ownManagedTLSPath := fmt.Sprintf("%scerts/own-managed", *flagFilePath) + assetPath := fmt.Sprintf("%sassets", *flagFilePath) + attachmentsPath := fmt.Sprintf("%sattachments", *flagFilePath) + + // print banner and version + cli.PrintBanner() + cli.PrintVersion(APP_NAME, version.Get()) + // get conf + conf, err := app.SetupConfig( + buildMode, + *flagConfigPath, + ) + if err != nil { + golog.Fatalf("failed to config: %s", err) + } + // setup database connection + db, err := app.SetupDatabase(conf) + if err != nil { + golog.Fatalf("Failed to connect to database: %s\nDSN: %s", err, conf.Database().DSN) + } + logger, atomicLogger, err := app.SetupLogger(buildMode, conf) + if err != nil { + golog.Fatalf("failed to setup logger: %s", err) + } + defer func() { + _ = logger.Sync() + }() + // set log levels + err = setLogLevels(db, atomicLogger) + if err != nil { + // this could fail due to db not being seeded + db.Logger = db.Logger.LogMode(gormLogger.Silent) + atomicLogger.SetLevel(zap.InfoLevel) + } + // set license server url + licenseServer := data.GetCrmURL() + // output debug information + /* + wd, err := os.Getwd() + if err != nil { + logger.Fatalw("Failed to get working directory", "error", err) + } + usr, err := user.Current() + if err != nil { + logger.Fatalw("Failed to get current user", "error", err) + } + // setup configuration + logger.Debugw("debug", + "applicationMode", buildMode, + 
"working directory", wd, + "OS user", usr.Username, + "pathConfig", *flagConfigPath, + ) + */ + if p := conf.LogPath; len(p) > 0 { + logger.Debugw("using log file", "path", p) + } + if p := conf.ErrLogPath; len(p) > 0 { + logger.Debugw("using error log file", "path", p) + } + // gin is always set to production mode + if buildMode == app.MODE_DEVELOPMENT { + gin.SetMode(gin.ReleaseMode) + } else { + gin.SetMode(gin.ReleaseMode) + } + + // setup utils and repos + utils := app.NewUtils() + repositories := app.NewRepositories(db) + // run migrations and seeding, including development seeding + // Use systemd flag to indicate this was installed via systemd + usingSystemd := *flagInstall || *flagSystemd + err = seed.InitialInstallAndSeed(db, repositories, logger, usingSystemd) + if err != nil { + logger.Fatalw("Failed to run migrations and seeding", "error", err) + } + // setup logging again so it is according to the database + err = setLogLevels(db, atomicLogger) + if err != nil { + // this could fail due to db not being seeded + db.Logger = db.Logger.LogMode(gormLogger.Silent) + atomicLogger.SetLevel(zap.InfoLevel) + } + // setup cert magic for TLS cert handling + certMagicConfig, certMagicCache, err := acme.SetupCertMagic( + acmeCertPath+"/acme", + conf, + db, + logger, + ) + if err != nil { + logger.Errorw("failed to setup certmagic", "error", err) + return + } + // setup services, middleware and controllers + services := app.NewServices( + db, + repositories, + logger, + utils, + assetPath, + attachmentsPath, + ownManagedTLSPath, + buildMode, + certMagicConfig, + certMagicCache, + licenseServer, + ) + // get entra-id options and setup msal client + ssoOpt, err := services.SSO.GetSSOOptionWithoutAuth(context.Background()) + if err != nil { + logger.Errorw("failed to setup sso", "error", err) + return + } + if ssoOpt.Enabled { + services.SSO.MSALClient, err = sso.NewEntreIDClient(ssoOpt) + if err != nil && !errors.Is(err, errs.ErrSSODisabled) { + logger.Errorw("failed to setup msal client", "error", err) + return + } + + } + middlewares := app.NewMiddlewares( + 1, + 1, + conf, + services, + utils, + logger, + ) + controllers := app.NewControllers( + assetPath, + attachmentsPath, + repositories, + services, + logger, + atomicLogger, + utils, + db, + ) + // setup admin account + isInstalled, err := controllers.InitialSetup.IsInstalled(context.Background()) + if err != nil { + logger.Fatalw("failed to check if app is installed", "error", err) + } + if !isInstalled { + err := controllers.InitialSetup.HandleInitialSetup(context.Background()) + if err != nil { + logger.Fatalw("failed to handle the installers initial setup", "error", err) + } + } + // TODO run migrations for existing databases + + // interactive account recovery + if *flagRecovery { + interactiveAccountRecovery(repositories, utils) + return + } + // setup administration server + var adminRouter *gin.Engine + if !build.Flags.Production { + adminRouter = gin.Default() + } else { + adminRouter = gin.New() + adminRouter.Use(ginzap.GinzapWithConfig(logger.Desugar(), &ginzap.Config{ + TimeFormat: time.RFC3339, + UTC: true, + Context: ginzap.Fn(func(c *gin.Context) []zapcore.Field { + fields := []zapcore.Field{} + fields = append(fields, zap.String("host", c.Request.Host)) + fields = append(fields, zap.String("server", "admin")) + + return fields + }), + })) + adminRouter.Use(ginzap.RecoveryWithZap(logger.Desugar(), true)) + // dont trust x-forwarded-by by default + if len(conf.IPSecurity.TrustedProxies) > 0 { + err := 
adminRouter.SetTrustedProxies(conf.IPSecurity.TrustedProxies) + if err != nil { + logger.Fatalw("failed to set trusted proxies", "error", err) + } + } else { + err := adminRouter.SetTrustedProxies(nil) + if err != nil { + logger.Fatalw("failed to set trusted proxies", "error", err) + } + } + // trust specific headers + adminRouter.TrustedPlatform = conf.IPSecurity.TrustedIPHeader + logger.Debugw("admin IP security", + "admin_allowed", strings.Join(conf.IPSecurity.AdminAllowed, ","), + "trusted_proxies", strings.Join(conf.IPSecurity.TrustedProxies, ","), + "trusted_ip_header", conf.IPSecurity.TrustedIPHeader, + ) + } + adminRouter.Use(middlewares.IPLimiter) + adminServer := app.NewAdministrationServer( + adminRouter, + controllers, + middlewares, + logger, + certMagicConfig, + build.Flags.Production, + ) + adminStartupChannel, adminListener, err := adminServer.StartServer(conf) + if err != nil { + logger.Fatalw("Failed to start admin server", "error", err) + } + + adminStartupResult := <-adminStartupChannel + if !adminStartupResult.Success && adminStartupResult.Error != nil { + logger.Fatalw("Failed to start admin server", "error", adminStartupResult.Error) + } + + // update the config with the actual port if the port was 0 + if conf.AdminNetAddressPort() == 0 { + err := conf.SetAdminNetAddress(adminListener.Addr().String()) + if err != nil { + logger.Fatalw("failed to set admin net address", "error", err) + } + err = conf.WriteToFile(*flagConfigPath) + if err != nil { + logger.Fatalw("failed to write config", "error", err) + } + } + // startup message + cli.PrintServerStarted("Admin server", adminListener.Addr().String()) + // start the phishing servers (HTTP and HTTPS) + phishingServer := app.NewServer( + assetPath, + ownManagedTLSPath, + db, + controllers, + services, + repositories, + logger, + certMagicConfig, + ) + + var r *gin.Engine + if !build.Flags.Production { + r = gin.Default() + } else { + r = gin.New() + r.Use(ginzap.GinzapWithConfig(logger.Desugar(), &ginzap.Config{ + TimeFormat: time.RFC3339, + UTC: true, + Context: ginzap.Fn(func(c *gin.Context) []zapcore.Field { + fields := []zapcore.Field{} + fields = append(fields, zap.String("host", c.Request.Host)) + fields = append(fields, zap.String("server", "admin")) + + return fields + }), + })) + r.Use(ginzap.RecoveryWithZap(logger.Desugar(), true)) + } + + r.Use(ginzap.GinzapWithConfig(logger.Desugar(), &ginzap.Config{ + TimeFormat: time.RFC3339, + UTC: true, + Context: ginzap.Fn(func(c *gin.Context) []zapcore.Field { + fields := []zapcore.Field{} + fields = append(fields, zap.String("host", c.Request.Host)) + fields = append(fields, zap.String("server", "phishing")) + + return fields + }), + })) + r.Use(ginzap.RecoveryWithZap(logger.Desugar(), true)) + phishingServer.AssignRoutes(r) + // start the HTTP server + httpTestChan, httpListener, err := phishingServer.StartHTTP(r, conf) + if err != nil { + logger.Fatalw("failed to start phishing HTTP server", "error", err) + } + httpTestResult := <-httpTestChan + if !httpTestResult.Success && httpTestResult.Error != nil { + logger.Fatalw("failed to start phishing HTTP server", "error", httpTestResult.Error) + } + cli.PrintServerStarted("Phishing HTTP server", httpListener.Addr().String()) + // start the HTTPSserver + httpsTestChan, httpsListener, err := phishingServer.StartHTTPS(r, conf) + if err != nil { + logger.Fatalw("failed to start HTTPS phishing server", "error", err) + } + httpsTestResult := <-httpsTestChan + if !httpsTestResult.Success && httpsTestResult.Error != nil { + 
logger.Fatalw("failed to start HTTPS phishing server", "error", httpsTestResult.Error) + } + cli.PrintServerStarted("Phishing HTTPS server", httpsListener.Addr().String()) + + // start the task handler + systemSession, err := model.NewSystemSession() + if err != nil { + logger.Fatalw("Failed to load system user", "error", err) + } + daemon := task.Runner{ + CampaignService: services.Campaign, + UpdateService: services.Update, + Logger: logger, + } + + // start tasks runner + // let the system tasks run once before starting the normal work tasks + // this ensure that a license check is completed before attempting to send out + // e-mails as that would cancel the e-mail delivery. + daemonCtx, cancelDaemons := context.WithCancel(context.Background()) + var wg sync.WaitGroup + wg.Add(1) + go daemon.RunSystemTasks( + daemonCtx, + systemSession, + &wg, + ) + wg.Wait() + go daemon.Run( + daemonCtx, + systemSession, + ) + + // handle aborts and abort signals + + abort := make(chan struct{}) + abortSignalCh := make(chan os.Signal, 1) + signal.Notify(abortSignalCh, syscall.SIGINT, syscall.SIGTERM, syscall.SIGABRT) + + // listen for abort signals + go func() { + sig := <-abortSignalCh + logger.Warnw("Received abort signal - initiating graceful shutdown", + "signal", sig, + ) + + // Create context with timeout for shutdown + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + // Graceful shutdown for daemons + logger.Debugf("Stopping daemons") + cancelDaemons() + + // Graceful shutdown for admin server + logger.Debugf("Stopping administration server") + if err := adminServer.Server.Shutdown(ctx); err != nil { + logger.Errorw("Admin server shutdown error", "error", err) + } + + // Graceful shutdown for HTTP server + logger.Debugf("Stopping HTTP Phishing server") + if err := phishingServer.HTTPServer.Shutdown(ctx); err != nil { + logger.Errorw("HTTP server shutdown error", "error", err) + } + + // Graceful shutdown for HTTPS server + logger.Debugf("Stopping HTTPS Phishing server") + if err := phishingServer.HTTPSServer.Shutdown(ctx); err != nil { + logger.Errorw("HTTPS server shutdown error", "error", err) + } + + // Close database connections + sqlDB, err := db.DB() + if err != nil { + logger.Errorw("Error getting DB instance", "error", err) + } else { + if err := sqlDB.Close(); err != nil { + logger.Errorw("Error closing database", "error", err) + } + } + + logger.Info("Graceful shutdown completed") + close(abort) + }() + + logger.Debug("Waiting for abort signal") + <-abort +} + +func interactiveAccountRecovery(repositories *app.Repositories, utils *app.Utilities) { + // check if we are in the same folder as the binary is in + ex, err := os.Executable() + if err != nil { + _, _ = fmt.Printf("Error getting executable path: %s\n", err) + return + } + binPath := filepath.Dir(ex) + currentPath, err := os.Getwd() + if err != nil { + _, _ = fmt.Printf("Error getting current directory: %s\n", err) + return + } + if binPath != currentPath { + _, _ = fmt.Printf("Please run this command from the same directory as the binary (%s)\n", binPath) + return + } + + // get the username to recover + var user *model.User + for user == nil { + account := "" + _, _ = fmt.Print("Enter account username: ") + _, _ = fmt.Scanln(&account) + username, err := vo.NewUsername(account) + if err != nil { + _, _ = fmt.Println("Invalid username") + continue + } + user, err = repositories.User.GetByUsername( + context.TODO(), + username, + &repository.UserOption{}, + ) + if err != nil { + _, _ = 
fmt.Printf("Could not find username: %s\n", err) + continue + } + _, _ = fmt.Println("User found") + } + uid := user.ID.MustGet() + for { + passwordInput := "" + passwordConfirmInput := "" + _, _ = fmt.Print("New password: ") + _, _ = fmt.Scanln(&passwordInput) + _, _ = fmt.Print("Confirm password: ") + _, _ = fmt.Scanln(&passwordConfirmInput) + if passwordInput != passwordConfirmInput { + _, _ = fmt.Println("Repeated password does not match") + continue + } + newPassword, err := vo.NewReasonableLengthPassword(passwordInput) + if err != nil { + _, _ = fmt.Printf("Error in password: %s\n", err) + continue + } + hash, err := utils.PasswordHasher.Hash(newPassword.String()) + if err != nil { + _, _ = fmt.Printf("Failed to hash password: %s\n", err) + continue + } + err = repositories.User.UpdatePasswordHashByID( + context.TODO(), + &uid, + hash, + ) + if err != nil { + _, _ = fmt.Printf("Failed to update password: %s\n", err) + continue + } + _, _ = fmt.Println("Password updated") + break + } + // remove any SSO or TOTP related data + user.SSOID = nullable.NewNullableWithValue("") + err = repositories.User.RemoveTOTP(context.TODO(), &uid) + if err != nil { + _, _ = fmt.Println("Failed to remove TOTP:", err) + } + err = repositories.User.UpdateUserToNoSSO(context.TODO(), &uid) + if err != nil { + _, _ = fmt.Println("Failed to remove TOTP:", err) + } +} + +func setLogLevels(db *gorm.DB, atomicLogger *zap.AtomicLevel) error { + // set log levels from DB for logger and db logger + var dbLogLevel database.Option + res := db. + Where("key = ?", data.OptionKeyDBLogLevel). + First(&dbLogLevel) + + if res.Error != nil && !errors.Is(res.Error, gorm.ErrRecordNotFound) { + return fmt.Errorf("failed to get DB log level: %w", res.Error) + } + switch dbLogLevel.Value { + case "silent": + db.Logger = db.Logger.LogMode(gormLogger.Silent) + case "info": + db.Logger = db.Logger.LogMode(gormLogger.Info) + case "warn": + db.Logger = db.Logger.LogMode(gormLogger.Warn) + case "error": + db.Logger = db.Logger.LogMode(gormLogger.Error) + } + var logLevel database.Option + res = db. + Where("key = ?", data.OptionKeyLogLevel). 
+ First(&logLevel) + if res.Error != nil && !errors.Is(res.Error, gorm.ErrRecordNotFound) { + return fmt.Errorf("failed to get log level: %w", res.Error) + } + switch logLevel.Value { + case "debug": + atomicLogger.SetLevel(zap.DebugLevel) + case "info": + atomicLogger.SetLevel(zap.InfoLevel) + case "warn": + atomicLogger.SetLevel(zap.WarnLevel) + case "error": + atomicLogger.SetLevel(zap.ErrorLevel) + } + return nil +} diff --git a/backend/middleware/ipFilter.go b/backend/middleware/ipFilter.go new file mode 100644 index 0000000..8f6f654 --- /dev/null +++ b/backend/middleware/ipFilter.go @@ -0,0 +1,57 @@ +package middleware + +import ( + "net" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/config" + "go.uber.org/zap" +) + +func NewAllowIPMiddleware(conf *config.Config, logger *zap.SugaredLogger) gin.HandlerFunc { + return func(c *gin.Context) { + // If no IP restrictions are configured, allow all + if len(conf.IPSecurity.AdminAllowed) == 0 { + c.Next() + return + } + c.RemoteIP() + clientIP := c.ClientIP() + allowed := false + for _, allowedIP := range conf.IPSecurity.AdminAllowed { + // check if the allowed entry is a CIDR + if strings.Contains(allowedIP, "/") { + _, ipNet, err := net.ParseCIDR(allowedIP) + if err != nil { + logger.Errorw("Invalid CIDR in allowed IPs", + "cidr", allowedIP, + "error", err) + continue + } + + ip := net.ParseIP(clientIP) + if ipNet.Contains(ip) { + allowed = true + break + } + } else { + // Direct IP comparison + if clientIP == allowedIP { + allowed = true + break + } + } + } + + if !allowed { + logger.Infow("blocked unauthorized IP access attempt", + "ip", clientIP) + c.AbortWithStatus(http.StatusForbidden) + return + } + + c.Next() + } +} diff --git a/backend/middleware/ratelimiter.go b/backend/middleware/ratelimiter.go new file mode 100644 index 0000000..91512cb --- /dev/null +++ b/backend/middleware/ratelimiter.go @@ -0,0 +1,83 @@ +package middleware + +import ( + "net/http" + "sync" + "time" + + "github.com/gin-gonic/gin" + "golang.org/x/time/rate" +) + +// NewIPRateLimiterMiddleware creates a middleware that limits the number of requests per IP +// limit is the number of requests per second +// burst is the maximum burst size, the maximum number of requests that can be made in a burst without being limited +func NewIPRateLimiterMiddleware(limit float64, burst int) gin.HandlerFunc { + ipLimiter := NewKeyRateLimiter(rate.Limit(limit), burst, 10*time.Minute) + return func(c *gin.Context) { + limiter := ipLimiter.GetLimiter(c.ClientIP()) + if !limiter.Allow() { + c.AbortWithStatus(http.StatusTooManyRequests) + return + } + c.Next() + } +} + +//const cleanupInterval = 1 * time.Minute +//const entryExpiration = 10 * time.Minute + +// KeyRateLimiter is a rate limiter for key such as username, email or IP +type KeyRateLimiter struct { + // ips is a map of key to rate limit + key sync.Map + // limiter is the rate limit, e.g. 1 request per seconds + limiter rate.Limit + // burst is the maximum burst size, the maximum number of requests that can be made in a burst without being limited + burst int + // cleanupInterval is the interval at which the expired keys are cleaned up + cleanupInterval time.Duration +} + +// NewKeyRateLimiter creates a new key rate limiter +// limiter is the rate limit, e.g. 
1 request per seconds +// burst is the maximum burst size, the maximum number of requests that can be made in a burst without being limited +func NewKeyRateLimiter( + limiter rate.Limit, + burst int, + cleanupInterval time.Duration, +) *KeyRateLimiter { + rl := &KeyRateLimiter{ + limiter: limiter, + burst: burst, + } + go rl.cleanup() + return rl +} + +// cleanup cleans up the expired keys, this is to avoid +// memory leaking through the sync.Map when the key is not used anymore +func (r *KeyRateLimiter) cleanup() { + for range time.Tick(r.cleanupInterval) { + now := time.Now() + r.key.Range(func(key, value interface{}) bool { + expirationTime := value.(time.Time) + if now.After(expirationTime) { + r.key.Delete(key) + } + return true + }) + } +} + +// GetLimiter gets the limiter for an key or creates one if it does not exist +func (r *KeyRateLimiter) GetLimiter(key string) *rate.Limiter { + value, exists := r.key.Load(key) + if exists { + return value.(*rate.Limiter) + } + + limiter := rate.NewLimiter(r.limiter, r.burst) + r.key.Store(key, limiter) + return limiter +} diff --git a/backend/middleware/session.go b/backend/middleware/session.go new file mode 100644 index 0000000..8a695fc --- /dev/null +++ b/backend/middleware/session.go @@ -0,0 +1,114 @@ +package middleware + +import ( + "crypto/sha256" + "crypto/subtle" + "time" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/api" + "github.com/phishingclub/phishingclub/controller" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/service" + "go.uber.org/zap" +) + +// NewSessionHandler creates a middleware that authenticates the user +// by checking it has a session, and if it does, it extends the session and puts +// the user and the session in the gin context. +// if the user does not have a session or must renew password, it returns an unauthorized response. +// if the request contains a valid user API key, the entire session handling is skipped +func NewSessionHandler( + sessionService *service.Session, + userService *service.User, + responseHandler api.JSONResponseHandler, + logger *zap.SugaredLogger, +) gin.HandlerFunc { + return func(c *gin.Context) { + isValidAPISession := handleAPISession(c, userService, logger) + if isValidAPISession { + return + } + s, err := sessionService.GetAndExtendSession(c) + if err != nil { + // errors are logged in service + _ = err + responseHandler.Unauthorized(c) + return + } + user := s.User + if user == nil { + logger.Error("user not found in session") + responseHandler.Unauthorized(c) + return + } + controller.SetSessionInGinContext(c, s) + c.Next() + } +} + +// handleAPISession handles if there is a API token in the request header +// returns true if this was a valid API session request +func handleAPISession( + c *gin.Context, + userService *service.User, + logger *zap.SugaredLogger, +) bool { + if headerAPIKey := c.Request.Header.Get(data.APIHeaderKey); len(headerAPIKey) > 0 { + // to check API apiUsers in constant time, we have to retrieve them all + // hash them all and constant time check. 
+ apiUsers, err := userService.GetAllAPIKeysSHA256(c) + if err != nil { + logger.Error("failed to get all api key hashes") + // responseHandler.BadRequest(c) + return false + } + incomingHash := sha256.Sum256([]byte(headerAPIKey)) + found := false + // Must check ALL keys in constant time + var rApiUser *model.APIUser + for _, apiUser := range apiUsers { + if subtle.ConstantTimeCompare(incomingHash[:], apiUser.APIKeyHash[:]) == 1 { + found = true + rApiUser = apiUser + break + } + } + if !found { + logger.Debug("API key not found") + // responseHandler.Unauthorized(c) + return false + } + // get user + systemService, err := model.NewSystemSession() + if err != nil { + logger.Error("failed to get system user") + return false + } + user, err := userService.GetByID(c, systemService, rApiUser.ID) + if err != nil { + logger.Error("failed to get user from API token") + return false + } + now := time.Now() + t := now.Add(time.Duration(1 * time.Minute)).UTC() + expiresAt := &t + maxAgeAt := &t + sid := uuid.MustParse(data.APISessionID) + session := &model.Session{ + ID: &sid, + ExpiresAt: expiresAt, + MaxAgeAt: maxAgeAt, + IP: c.ClientIP(), + User: user, + IsUserLoaded: true, + IsAPITokenRequest: true, + } + controller.SetSessionInGinContext(c, session) + c.Next() + return true + } + return false +} diff --git a/backend/model/README.md b/backend/model/README.md new file mode 100644 index 0000000..6a0bc51 --- /dev/null +++ b/backend/model/README.md @@ -0,0 +1,19 @@ +# Description +The `model` folder represents the core data structures. It includes: + +- Factories for creating and initializing entities +- Type definitions for all domain objects +- Validation logic to ensure data integrity +- Custom marshalling/unmarshalling for JSON handling +- Request/Response structures for API endpoints +- Helper methods for data transformation +- Business rules and constraints +- Data transfer objects (DTOs) for external communication + +The members mainly use `nullable.Nullable` to convey if a field is set so we know +if it should be updated etc. + +The structures in this folder serve as the contract between the API layer and the database layer, handling all necessary data transformations and validations before persistence or transmission. + +Files affixed with `View` are read-only models meant only for outgoing API data, +this could be joined data from a database. 
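
To make the `nullable.Nullable` convention concrete, here is a minimal sketch (the `Company` struct and its fields are hypothetical, for illustration only); it mirrors the `ToDBMap` pattern the models in this folder follow, where unspecified fields are skipped and explicit nulls clear the column.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/oapi-codegen/nullable"
)

// Company is a hypothetical model used only to illustrate the pattern.
type Company struct {
	Name  nullable.Nullable[string] `json:"name"`
	Notes nullable.Nullable[string] `json:"notes"`
}

// ToDBMap converts only the fields present in the request into an update map:
// absent fields are omitted (not updated), explicit nulls become nil (cleared).
func (c *Company) ToDBMap() map[string]any {
	m := map[string]any{}
	if c.Name.IsSpecified() {
		m["name"] = nil
		if v, err := c.Name.Get(); err == nil {
			m["name"] = v
		}
	}
	if c.Notes.IsSpecified() {
		m["notes"] = nil
		if v, err := c.Notes.Get(); err == nil {
			m["notes"] = v
		}
	}
	return m
}

func main() {
	// "notes" is absent from the JSON, so it stays out of the update map;
	// sending {"notes":null} instead would include it as nil and clear the column.
	var c Company
	_ = json.Unmarshal([]byte(`{"name":"ACME"}`), &c)
	fmt.Println(c.ToDBMap()) // map[name:ACME]
}
```
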
diff --git a/backend/model/allowDeny.go b/backend/model/allowDeny.go new file mode 100644 index 0000000..67b9090 --- /dev/null +++ b/backend/model/allowDeny.go @@ -0,0 +1,119 @@ +package model + +import ( + "fmt" + "net" + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// AllowDeny is a model for allow deny listing +type AllowDeny struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String127] `json:"name"` + Cidrs nullable.Nullable[vo.IPNetSlice] `json:"cidrs"` + Allowed nullable.Nullable[bool] `json:"allowed"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` +} + +// Validate checks if the allow deny list has a valid state +func (r *AllowDeny) Validate() error { + if err := validate.NullableFieldRequired("name", r.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("cidrs", r.Cidrs); err != nil { + return err + } + if v := r.Cidrs.MustGet(); len(v) == 0 { + return errs.NewValidationError( + errors.New("cidrs must include atleast one CIDR"), + ) + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (r *AllowDeny) ToDBMap() map[string]any { + m := map[string]any{} + if r.Name.IsSpecified() { + m["name"] = nil + if name, err := r.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if r.Cidrs.IsSpecified() { + m["cidrs"] = nil + if cidrs, err := r.Cidrs.Get(); err == nil { + cidrsStr := "" + cidrsLen := len(cidrs) + for i, cidr := range cidrs { + if i == cidrsLen { + cidrsStr += fmt.Sprintf("%s", cidr.String()) + + } else { + cidrsStr += fmt.Sprintf("%s\n", cidr.String()) + } + } + m["cidrs"] = cidrsStr + } + } + if r.Allowed.IsSpecified() { + m["allowed"] = nil + if allowed, err := r.Allowed.Get(); err == nil { + m["allowed"] = allowed + } + } + if r.CompanyID.IsSpecified() { + if r.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = r.CompanyID.MustGet() + } + } + + return m +} + +func (r *AllowDeny) IsIPAllowed(ip string) (bool, error) { + isTypeAllowList := r.Allowed.MustGet() + cidrs, err := r.Cidrs.Get() + if err != nil { + return false, errs.Wrap(err) + } + + netIP := net.ParseIP(ip) + if netIP == nil { + return false, fmt.Errorf("invalid ip address: %s", ip) + } + + for _, cidr := range cidrs { + isInRange := cidr.Contains(netIP) + // if allow list and ip is within range + if isTypeAllowList && isInRange { + return true, nil + } + // if deny list and ip is within range + if !isTypeAllowList && isInRange { + return false, nil + } + } + + // If this is an allow list and we didn't find the IP, it's not allowed + if isTypeAllowList { + return false, nil + } + + // If this is a deny list and we didn't find the IP, it is allowed + return true, nil +} diff --git a/backend/model/apiSender.go b/backend/model/apiSender.go new file mode 100644 index 0000000..8988a01 --- /dev/null +++ b/backend/model/apiSender.go @@ -0,0 +1,286 @@ +package model + +import ( + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + 
"github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// APISender is a API sender +type APISender struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Name nullable.Nullable[vo.String64] `json:"name"` + APIKey nullable.Nullable[vo.OptionalString255] `json:"apiKey"` + CustomField1 nullable.Nullable[vo.OptionalString255] `json:"customField1"` + CustomField2 nullable.Nullable[vo.OptionalString255] `json:"customField2"` + CustomField3 nullable.Nullable[vo.OptionalString255] `json:"customField3"` + CustomField4 nullable.Nullable[vo.OptionalString255] `json:"customField4"` + RequestMethod nullable.Nullable[vo.HTTPMethod] `json:"requestMethod"` + RequestURL nullable.Nullable[vo.String255] `json:"requestURL"` + RequestHeaders nullable.Nullable[APISenderHeaders] `json:"requestHeaders"` + RequestBody nullable.Nullable[vo.OptionalString1MB] `json:"requestBody"` + ExpectedResponseStatusCode nullable.Nullable[int] `json:"expectedResponseStatusCode"` + ExpectedResponseHeaders nullable.Nullable[APISenderHeaders] `json:"expectedResponseHeaders"` + ExpectedResponseBody nullable.Nullable[vo.OptionalString1MB] `json:"expectedResponseBody"` +} + +// Validate checks if the API sender has a valid state +func (a *APISender) Validate() error { + if err := validate.NullableFieldRequired("name", a.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("requestMethod", a.RequestMethod); err != nil { + return err + } + if err := validate.NullableFieldRequired("requestURL", a.RequestURL); err != nil { + return err + } + // one of the following is required + if (!a.ExpectedResponseStatusCode.IsSpecified() || a.ExpectedResponseStatusCode.IsNull()) && + !isSpecifiedStringWithContent(a.ExpectedResponseBody) && + !isSpecifiedStringWithContent(a.ExpectedResponseHeaders) { + return validate.WrapErrorWithField(errors.New("expectedResponseStatusCode, expectedResponseBody or expectedResponseHeaders must be supplied"), "Missing field") + } + return nil +} + +func isSpecifiedStringWithContent[T fmt.Stringer](s nullable.Nullable[T]) bool { + return s.IsSpecified() && !s.IsNull() && s.MustGet().String() != "" +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (a *APISender) ToDBMap() map[string]interface{} { + m := map[string]interface{}{} + if a.Name.IsSpecified() { + m["name"] = nil + if name, err := a.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if a.CompanyID.IsSpecified() { + if a.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = a.CompanyID.MustGet() + } + } + if a.APIKey.IsSpecified() { + m["api_key"] = nil + if apiKey, err := a.APIKey.Get(); err == nil { + m["api_key"] = apiKey.String() + } + } + if a.CustomField1.IsSpecified() { + m["custom_field1"] = nil + if customField1, err := a.CustomField1.Get(); err == nil { + m["custom_field1"] = customField1.String() + } + } + if a.CustomField2.IsSpecified() { + m["custom_field2"] = nil + if customField2, err := a.CustomField2.Get(); err == nil { + m["custom_field2"] = customField2.String() + } + } + if a.CustomField3.IsSpecified() { + m["custom_field3"] = nil + if customField3, err := a.CustomField3.Get(); err == nil { + m["custom_field3"] = 
customField3.String() + } + } + if a.CustomField4.IsSpecified() { + m["custom_field4"] = nil + if customField4, err := a.CustomField4.Get(); err == nil { + m["custom_field4"] = customField4.String() + } + } + if a.RequestMethod.IsSpecified() { + m["request_method"] = nil + if requestMethod, err := a.RequestMethod.Get(); err == nil { + m["request_method"] = requestMethod.String() + } + } + if a.RequestURL.IsSpecified() { + m["request_url"] = nil + if requestURL, err := a.RequestURL.Get(); err == nil { + m["request_url"] = requestURL.String() + } + } + if a.RequestHeaders.IsSpecified() { + m["request_headers"] = nil + if requestHeaders, err := a.RequestHeaders.Get(); err == nil { + m["request_headers"] = requestHeaders.String() + } + } + if a.RequestBody.IsSpecified() { + m["request_body"] = nil + if requestBody, err := a.RequestBody.Get(); err == nil { + m["request_body"] = requestBody.String() + } + } + if a.ExpectedResponseStatusCode.IsSpecified() { + m["expected_response_status_code"] = nil + if expectedResponseStatusCode, err := a.ExpectedResponseStatusCode.Get(); err == nil { + m["expected_response_status_code"] = expectedResponseStatusCode + } + } + if a.ExpectedResponseHeaders.IsSpecified() { + m["expected_response_headers"] = nil + if expectedResponseHeaders, err := a.ExpectedResponseHeaders.Get(); err == nil { + m["expected_response_headers"] = expectedResponseHeaders.String() + } + } + if a.ExpectedResponseBody.IsSpecified() { + m["expected_response_body"] = nil + if expectedResponseBody, err := a.ExpectedResponseBody.Get(); err == nil { + m["expected_response_body"] = expectedResponseBody.String() + } + } + return m +} + +// TODO should the rest of the code be moved to value object +type HTTPHeader struct { + Key string `json:"key"` + Value string `json:"value"` +} + +// APISenderHeaders is a header for a API sender +type APISenderHeaders struct { + Headers []*HTTPHeader +} + +// NewAPISenderHeader creates a new APISenderHeader +// it takes a newline separated string with headers of the format .+: .+ (key: value) +func NewAPISenderHeader(headers string) (*APISenderHeaders, error) { + headers = strings.TrimSpace(headers) + if headers == "" { + return &APISenderHeaders{}, nil + } + // split the headers + lines := strings.Split(headers, "\n") + // if there is a single header + if len(lines) == 0 { + return &APISenderHeaders{ + Headers: []*HTTPHeader{}, + }, nil + } + headerLines := []*HTTPHeader{} + for _, line := range lines { + // split the key value + parts := strings.Split(line, ":") + // there should be atleast 2 parts, key and value + if len(parts) < 2 { + return nil, validate.WrapErrorWithField( + fmt.Errorf("invalid header: %s", line), + "header", + ) + } + key := strings.TrimSpace(parts[0]) + value := strings.TrimSpace(strings.Join(parts[1:], ":")) + headerLines = append(headerLines, &HTTPHeader{ + Key: key, + Value: value, + }) + } + return &APISenderHeaders{ + Headers: headerLines, + }, nil +} + +// MarshalJSON implements the json.Marshaler interface +func (s APISenderHeaders) MarshalJSON() ([]byte, error) { + return json.Marshal(s.String()) +} + +// UnmarshalJSON unmarshals the json into a string +func (s *APISenderHeaders) UnmarshalJSON(data []byte) error { + var header string + if err := json.Unmarshal(data, &header); err != nil { + return err + } + ss, err := NewAPISenderHeader(header) + if err != nil { + return err + } + s.Headers = ss.Headers + return nil +} + +// String returns the string representation of the APISenderHeader +func (a APISenderHeaders) String() 
string { + headers := "" + for _, header := range a.Headers { + headers += header.Key + ": " + header.Value + "\n" + } + return headers +} + +func NewAPISenderExample() *APISender { + apiSenderRequestHeaders, err := NewAPISenderHeader("foo: bar") + if err != nil { + panic("APISender example data MUST be valid") + } + return &APISender{ + Name: nullable.NewNullableWithValue( + *vo.NewString64Must("Example"), + ), + APIKey: nullable.NewNullableWithValue( + *vo.NewOptionalString255Must( + "rj90jf09jr09j2r", + ), + ), + CustomField1: nullable.NewNullableWithValue( + *vo.NewOptionalString255Must( + "custom1", + ), + ), + CustomField2: nullable.NewNullableWithValue( + *vo.NewOptionalString255Must( + "custom2", + ), + ), + CustomField3: nullable.NewNullableWithValue( + *vo.NewOptionalString255Must( + "custom3", + ), + ), + CustomField4: nullable.NewNullableWithValue( + *vo.NewOptionalString255Must( + "custom4", + ), + ), + RequestMethod: nullable.NewNullableWithValue( + *vo.NewHTTPMethodMust("POST"), + ), + RequestURL: nullable.NewNullableWithValue( + *vo.NewString255Must("https://example.com"), + ), + RequestHeaders: nullable.NewNullableWithValue( + *apiSenderRequestHeaders, + ), + RequestBody: nullable.NewNullableWithValue( + *vo.NewOptionalString1MBMust("

Hello World

"), + ), + ExpectedResponseStatusCode: nullable.NewNullableWithValue(200), + ExpectedResponseHeaders: nullable.NewNullableWithValue( + *apiSenderRequestHeaders, + ), + ExpectedResponseBody: nullable.NewNullableWithValue( + *vo.NewOptionalString1MBMust("

World

"), + ), + } +} diff --git a/backend/model/asset.go b/backend/model/asset.go new file mode 100644 index 0000000..dccf80b --- /dev/null +++ b/backend/model/asset.go @@ -0,0 +1,79 @@ +package model + +import ( + "mime/multipart" + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Asset is a file Asset entity +type Asset struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + DomainName nullable.Nullable[vo.String255] `json:"domainName"` + DomainID nullable.Nullable[uuid.UUID] `json:"domainID"` + Name nullable.Nullable[vo.OptionalString127] `json:"name"` + Description nullable.Nullable[vo.OptionalString255] `json:"description"` + Path nullable.Nullable[vo.RelativeFilePath] `json:"path"` + File multipart.FileHeader `json:"-"` +} + +// Validate checks if the Asset has a valid state +func (a *Asset) Validate() error { + if err := validate.NullableFieldRequired("name", a.Name); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (a *Asset) ToDBMap() map[string]any { + m := map[string]any{} + if a.CompanyID.IsSpecified() { + if a.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = a.CompanyID.MustGet() + } + } + if a.DomainName.IsSpecified() { + m["domain_name"] = nil + if domainName, err := a.DomainName.Get(); err == nil { + m["domain_name"] = domainName.String() + } + } + // TODO is a global asset attached to a domain? 
if not then this should be possible to set to null like company ID + if a.DomainID.IsSpecified() { + m["domain_id"] = nil + if domainID, err := a.DomainID.Get(); err == nil { + m["domain_id"] = domainID.String() + } + } + if a.Name.IsSpecified() { + m["name"] = nil + if name, err := a.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if a.Description.IsSpecified() { + m["description"] = nil + if description, err := a.Description.Get(); err == nil { + m["description"] = description.String() + } + } + if a.Path.IsSpecified() { + m["path"] = nil + if path, err := a.Path.Get(); err == nil { + m["path"] = path.String() + } + } + return m +} diff --git a/backend/model/attachment.go b/backend/model/attachment.go new file mode 100644 index 0000000..472de22 --- /dev/null +++ b/backend/model/attachment.go @@ -0,0 +1,76 @@ +package model + +import ( + "mime/multipart" + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +type Attachment struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Name nullable.Nullable[vo.OptionalString127] `json:"name"` + Description nullable.Nullable[vo.OptionalString255] `json:"description"` + FileName nullable.Nullable[vo.FileName] `json:"fileName"` + EmbeddedContent nullable.Nullable[bool] `json:"embeddedContent"` + // path is the calculated path to the file + Path nullable.Nullable[vo.RelativeFilePath] `json:"path"` + // used in the API to upload files + File *multipart.FileHeader `json:"-"` +} + +func (a *Attachment) Validate() error { + if err := validate.NullableFieldRequired("name", a.Name); err != nil { + return err + } + return nil +} + +func (a *Attachment) ToDBMap() map[string]any { + m := map[string]any{} + if a.CompanyID.IsSpecified() { + if a.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = a.CompanyID.MustGet() + } + } + if a.Name.IsSpecified() { + m["name"] = nil + if name, err := a.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if a.Description.IsSpecified() { + m["description"] = nil + if description, err := a.Description.Get(); err == nil { + m["description"] = description.String() + } + } + if a.FileName.IsSpecified() { + m["filename"] = nil + if fileName, err := a.FileName.Get(); err == nil { + m["filename"] = fileName.String() + } + // if name is not set, use file name + if m["name"] == nil { + m["name"] = m["filename"] + } + } + if a.Path.IsSpecified() { + m["path"] = nil + if path, err := a.Path.Get(); err == nil { + m["path"] = path.String() + } + } + if v, err := a.EmbeddedContent.Get(); err == nil { + m["embedded_content"] = v + } + return m +} diff --git a/backend/model/campaign.go b/backend/model/campaign.go new file mode 100644 index 0000000..2aaa7df --- /dev/null +++ b/backend/model/campaign.go @@ -0,0 +1,390 @@ +package model + +import ( + "fmt" + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Campaign is a phishing Campaign entity +type Campaign struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time 
`json:"updatedAt"` + CloseAt nullable.Nullable[time.Time] `json:"closeAt"` + ClosedAt nullable.Nullable[time.Time] `json:"closedAt"` + AnonymizeAt nullable.Nullable[time.Time] `json:"anonymizeAt"` + AnonymizedAt nullable.Nullable[time.Time] `json:"anonymizedAt"` + SortField nullable.Nullable[vo.CampaignSortField] `json:"sortField"` + SortOrder nullable.Nullable[vo.CampaignSendingOrder] `json:"sortOrder"` + SendStartAt nullable.Nullable[time.Time] `json:"sendStartAt"` + SendEndAt nullable.Nullable[time.Time] `json:"sendEndAt"` + ConstraintWeekDays nullable.Nullable[vo.CampaignWeekDays] `json:"constraintWeekDays"` + ConstraintStartTime nullable.Nullable[vo.CampaignTimeConstraint] `json:"constraintStartTime"` + ConstraintEndTime nullable.Nullable[vo.CampaignTimeConstraint] `json:"constraintEndTime"` + + Name nullable.Nullable[vo.String64] `json:"name"` + + SaveSubmittedData nullable.Nullable[bool] `json:"saveSubmittedData"` + IsAnonymous nullable.Nullable[bool] `json:"isAnonymous"` + IsTest nullable.Nullable[bool] `json:"isTest"` + TemplateID nullable.Nullable[uuid.UUID] `json:"templateID"` + Template *CampaignTemplate `json:"template"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Company *Company `json:"company"` + RecipientGroups []*RecipientGroup `json:"recipientGroups"` + RecipientGroupIDs nullable.Nullable[[]*uuid.UUID] `json:"recipientGroupIDs,omitempty"` + AllowDeny []*AllowDeny `json:"allowDeny"` + AllowDenyIDs nullable.Nullable[[]*uuid.UUID] `json:"allowDenyIDs,omitempty"` + DenyPageID nullable.Nullable[uuid.UUID] `json:"denyPageID,omitempty"` + DenyPage *Page `json:"denyPage"` + WebhookID nullable.Nullable[uuid.UUID] `json:"webhookID"` + + // must not be set by a user + NotableEventID nullable.Nullable[uuid.UUID] `json:"notableEventID"` + NotableEventName string `json:"notableEventName"` +} + +// Validate checks if the campaign has a valid state +func (c *Campaign) Validate() error { + if err := validate.NullableFieldRequired("name", c.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("sortField", c.SortField); err != nil { + return err + } + if err := validate.NullableFieldRequired("sortOrder", c.SortOrder); err != nil { + return err + } + // if a start or end is set, then end must be equal or after the start + if c.SendStartAt.IsSpecified() && !c.SendStartAt.IsNull() || (c.SendEndAt.IsSpecified() && !c.SendEndAt.IsNull()) { + if err := validate.NullableFieldRequired("sendStartAt", c.SendStartAt); err != nil { + return err + } + if err := validate.NullableFieldRequired("sendEndAt", c.SendEndAt); err != nil { + return err + } + if c.SendEndAt.MustGet().Before(c.SendStartAt.MustGet()) { + return validate.WrapErrorWithField(errors.New("send end time must be after start time"), "sendEndAt") + } + } + if err := validate.NullableFieldRequired("templateID", c.TemplateID); err != nil { + return err + } + if err := validate.NullableFieldRequired("RecipientGroupIDs", c.RecipientGroupIDs); err != nil { + return err + } + if len(c.RecipientGroupIDs.MustGet()) == 0 { + return validate.WrapErrorWithField(errors.New("must have at least one recipient group"), "RecipientGroupIDs") + } + if err := c.ValidateDenyPage(); err != nil { + return err + } + + // if ConstraintWeekDays or ConstraintStartTime or ConstraintEndTime is set, then this is a 'scheduled type' + // this requires that all fields are set and that end time is equal or after start time + if (c.ConstraintWeekDays.IsSpecified() && !c.ConstraintWeekDays.IsNull()) || + 
(c.ConstraintStartTime.IsSpecified() && !c.ConstraintStartTime.IsNull()) || + (c.ConstraintEndTime.IsSpecified() && !c.ConstraintEndTime.IsNull()) { + // check required fields are set + if err := c.ValidateSendTimesSet(); err != nil { + return err + } + if err := validate.NullableFieldRequired("ConstraintWeekDays", c.ConstraintWeekDays); err != nil { + return err + } + if c.ConstraintWeekDays.MustGet().Count() == 0 { + return validate.WrapErrorWithField(errors.New("must have at least one day selected"), "ConstraintWeekDays") + } + if err := validate.NullableFieldRequired("ConstraintStartTime", c.ConstraintStartTime); err != nil { + return err + } + if err := validate.NullableFieldRequired("ConstraintEndTime", c.ConstraintEndTime); err != nil { + return err + } + // check that times and days are valid + constraintStartTime := c.ConstraintStartTime.MustGet() + constraintEndTime := c.ConstraintEndTime.MustGet() + if constraintStartTime.IsAfter(constraintEndTime) { + return validate.WrapErrorWithField(errors.New("constraint end time must be after start time"), "ConstraintEndTime") + } + if constraintStartTime.IsEqual(constraintEndTime) { + return validate.WrapErrorWithField(errors.New("constraint end time must be after start time"), "ConstraintEndTime") + } + startAt := c.SendStartAt.MustGet() + endAt := c.SendEndAt.MustGet() + // check that selected weekdays are within the start and end date + isWithin := c.ConstraintWeekDays.MustGet().IsWithin(&startAt, &endAt) + if !isWithin { + return validate.WrapErrorWithField( + fmt.Errorf( + "constraint week days must be within the start (%s) and end date (%s)", + startAt.Format("2006-01-02"), + endAt.Format("2006-01-02"), + ), + "ConstraintWeekDays", + ) + } + } + // ensure closeAt and anonymize is correctly set after other dates if set + if c.CloseAt.IsSpecified() && !c.CloseAt.IsNull() { + closeAt := c.CloseAt.MustGet() + if v, err := c.SendEndAt.Get(); err == nil { + if closeAt.Before(v) { + return validate.WrapErrorWithField(errors.New("close at must be after end date"), "CloseAt") + } + } + } + if c.AnonymizeAt.IsSpecified() && !c.AnonymizeAt.IsNull() { + anonymizeAt := c.AnonymizeAt.MustGet() + if v, err := c.CloseAt.Get(); err == nil { + if anonymizeAt.Before(v) { + return validate.WrapErrorWithField(errors.New("anonymize at must be after close date"), "AnonymizeAt") + } + } + if v, err := c.SendEndAt.Get(); err != nil { + if anonymizeAt.Before(v) { + return validate.WrapErrorWithField(errors.New("anonymize at must be after end date"), "AnonymizeAt") + } + } + if v, err := c.SendStartAt.Get(); err == nil { + if anonymizeAt.Before(v) { + return validate.WrapErrorWithField(errors.New("anonymize at must be after start date"), "AnonymizeAt") + } + } + } + // must not be set from api consumers + if c.NotableEventID.IsSpecified() && !c.NotableEventID.IsNull() { + c.NotableEventID.SetNull() + } + + return nil +} + +// ValidateSendTimesSet checks that the send start and end times are set +func (c *Campaign) ValidateSendTimesSet() error { + if err := validate.NullableFieldRequired("sendStartAt", c.SendStartAt); err != nil { + return err + } + if err := validate.NullableFieldRequired("sendEndAt", c.SendEndAt); err != nil { + return err + } + return nil +} + +// ValidateScheduledType checks times related to a scheduled type campaign +func (c *Campaign) ValidateScheduledTimes() error { + if err := c.ValidateSendTimesSet(); err != nil { + return err + } + if err := validate.NullableFieldRequired("ConstraintWeekDays", c.ConstraintWeekDays); err != nil { + 
return err + } + if err := validate.NullableFieldRequired("ConstraintStartTime", c.ConstraintStartTime); err != nil { + return err + } + if err := validate.NullableFieldRequired("ConstraintEndTime", c.ConstraintEndTime); err != nil { + return err + } + return nil +} + +// ValidateNoSendTimesSet checks that the send start and end times are not set +func (c *Campaign) ValidateNoSendTimesSet() error { + if c.SendStartAt.IsSpecified() && !c.SendStartAt.IsNull() { + return validate.WrapErrorWithField(errors.New("send start time must not be set"), "sendStartAt") + } + if c.SendEndAt.IsSpecified() && !c.SendEndAt.IsNull() { + return validate.WrapErrorWithField(errors.New("send end time must not be set"), "sendEndAt") + } + return nil +} + +// ValidateDenyPage checks that a deny page is set +func (c *Campaign) ValidateDenyPage() error { + if c.DenyPageID.IsSpecified() && !c.DenyPageID.IsNull() { + if c.AllowDenyIDs.IsSpecified() && (c.AllowDenyIDs.IsNull() || len(c.AllowDenyIDs.MustGet()) == 0) { + return validate.WrapErrorWithField(errors.New("requires a allow deny IDs to be set"), "denyPage") + } + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (c *Campaign) ToDBMap() map[string]any { + m := map[string]any{} + if c.Name.IsSpecified() { + m["name"] = nil + if v, err := c.Name.Get(); err == nil { + m["name"] = v.String() + } + } + if c.SortField.IsSpecified() { + m["sort_field"] = nil + if v, err := c.SortField.Get(); err == nil { + m["sort_field"] = v.String() + } + } + if c.SortOrder.IsSpecified() { + m["sort_order"] = nil + if v, err := c.SortOrder.Get(); err == nil { + m["sort_order"] = v.String() + } + } + if c.SendStartAt.IsSpecified() { + m["send_start_at"] = nil + if v, err := c.SendStartAt.Get(); err == nil { + m["send_start_at"] = utils.RFC3339UTC(v) + } + } + if c.ConstraintWeekDays.IsSpecified() { + m["constraint_week_days"] = nil + if v, err := c.ConstraintWeekDays.Get(); err == nil { + m["constraint_week_days"] = v.Int() + } + } + if c.ConstraintStartTime.IsSpecified() { + m["constraint_start_time"] = nil + if v, err := c.ConstraintStartTime.Get(); err == nil { + m["constraint_start_time"] = v.String() + } + } + if c.ConstraintEndTime.IsSpecified() { + m["constraint_end_time"] = nil + if v, err := c.ConstraintEndTime.Get(); err == nil { + m["constraint_end_time"] = v.String() + } + } + if c.SendEndAt.IsSpecified() { + m["send_end_at"] = nil + if v, err := c.SendEndAt.Get(); err == nil { + m["send_end_at"] = utils.RFC3339UTC(v) + } + } + if c.CloseAt.IsSpecified() { + m["close_at"] = nil + if v, err := c.CloseAt.Get(); err == nil { + m["close_at"] = utils.RFC3339UTC(v) + } + } + if c.ClosedAt.IsSpecified() { + m["closed_at"] = nil + if v, err := c.ClosedAt.Get(); err == nil { + m["closed_at"] = utils.RFC3339UTC(v) + } + } + if c.AnonymizeAt.IsSpecified() { + m["anonymize_at"] = nil + if v, err := c.AnonymizeAt.Get(); err == nil { + m["anonymize_at"] = utils.RFC3339UTC(v) + } + } + if c.SaveSubmittedData.IsSpecified() { + m["save_submitted_data"] = false + if v, err := c.SaveSubmittedData.Get(); err == nil { + m["save_submitted_data"] = v + } + } + if c.IsTest.IsSpecified() { + m["is_test"] = false + if v, err := c.IsTest.Get(); err == nil { + m["is_test"] = v + } + } + if c.IsAnonymous.IsSpecified() { + m["is_anonymous"] = false + if v, err := c.IsAnonymous.Get(); err == nil { + 
m["is_anonymous"] = v + } + } + if c.TemplateID.IsSpecified() { + m["campaign_template_id"] = nil + if v, err := c.TemplateID.Get(); err == nil { + m["campaign_template_id"] = v.String() + } + } + if c.CompanyID.IsSpecified() { + if c.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = c.CompanyID.MustGet() + } + } + allowDenyIsSet := c.AllowDenyIDs.IsSpecified() && !c.AllowDenyIDs.IsNull() && len(c.AllowDenyIDs.MustGet()) > 0 + if allowDenyIsSet { + if v, err := c.DenyPageID.Get(); err == nil { + m["deny_page_id"] = v.String() + } else { + m["deny_page_id"] = nil + } + } else { + m["deny_page_id"] = nil + } + if c.WebhookID.IsSpecified() { + m["webhook_id"] = nil + if v, err := c.WebhookID.Get(); err == nil { + m["webhook_id"] = v.String() + } + } + if v, err := c.NotableEventID.Get(); err == nil { + m["notable_event_id"] = v.String() + } + + return m +} + +// Close sets the close at timestamp to now +// dont confuse with method Closed +func (c *Campaign) Close() error { + if c.ClosedAt.IsSpecified() && !c.ClosedAt.IsNull() { + return errs.ErrCampaignAlreadyClosed + } + if c.CloseAt.IsSpecified() && !c.CloseAt.IsNull() { + return errs.ErrCampaignAlreadySetToClose + } + c.CloseAt.Set(time.Now().UTC()) + return nil +} + +// Closed sets the closed at timestamp to now +// dont confuse with method Close +func (c *Campaign) Closed() error { + if c.ClosedAt.IsSpecified() && !c.ClosedAt.IsNull() { + return errs.ErrCampaignAlreadyClosed + } + c.ClosedAt.Set(time.Now().UTC()) + return nil +} + +// Anonymize sets the anonymized at timestamp +func (c *Campaign) Anonymize() error { + if c.AnonymizedAt.IsSpecified() && !c.AnonymizedAt.IsNull() { + return errs.ErrCampaignAlreadyAnonymized + } + c.AnonymizedAt.Set(time.Now().UTC()) + return nil +} + +// IsActive returns true if the campaign is active +func (c *Campaign) IsActive() bool { + now := time.Now() + if c.ClosedAt.IsSpecified() && !c.ClosedAt.IsNull() && c.ClosedAt.MustGet().Before(now) { + return false + } + return true +} + +// IsSelfManaged returns true if the campaign is self managed +func (c *Campaign) IsSelfManaged() bool { + return c.SendStartAt.IsSpecified() && c.SendStartAt.IsNull() && c.SendEndAt.IsSpecified() && c.SendEndAt.IsNull() +} diff --git a/backend/model/campaignEvent.go b/backend/model/campaignEvent.go new file mode 100644 index 0000000..f20fe46 --- /dev/null +++ b/backend/model/campaignEvent.go @@ -0,0 +1,22 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/vo" +) + +type CampaignEvent struct { + ID *uuid.UUID `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + CampaignID *uuid.UUID `json:"campaignID"` + IP *vo.OptionalString64 `json:"ip"` + UserAgent *vo.OptionalString255 `json:"userAgent"` + Data *vo.OptionalString1MB `json:"data"` + AnonymizedID *uuid.UUID `json:"anonymizedID"` + // if null the recipient has been anonymized + RecipientID *uuid.UUID `json:"recipientID"` + Recipient *Recipient `json:"recipient,omitempty"` + EventID *uuid.UUID `json:"eventID"` +} diff --git a/backend/model/campaignRecipient.go b/backend/model/campaignRecipient.go new file mode 100644 index 0000000..7fc6491 --- /dev/null +++ b/backend/model/campaignRecipient.go @@ -0,0 +1,109 @@ +package model + +import ( + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/validate" +) + +// CampaignRecipient is a campaign recipient 
+// this model must not be consumed from a endpoint +type CampaignRecipient struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CancelledAt nullable.Nullable[time.Time] `json:"cancelledAt"` + SendAt nullable.Nullable[time.Time] `json:"sendAt"` + LastAttemptAt nullable.Nullable[time.Time] `json:"lastAttemptAt"` + SentAt nullable.Nullable[time.Time] `json:"sentAt"` + SelfManaged nullable.Nullable[bool] `json:"selfManaged"` + AnonymizedID nullable.Nullable[uuid.UUID] `json:"anonymizedID"` + CampaignID nullable.Nullable[uuid.UUID] `json:"campaignID"` + Campaign *Campaign `json:"campaign"` + // null recipientID means that the data has been anonymized + RecipientID nullable.Nullable[uuid.UUID] `json:"recipientID"` + Recipient *Recipient `json:"recipient"` + NotableEventID nullable.Nullable[uuid.UUID] `json:"notableEventID"` + NotableEventName string `json:"notableEventName"` +} + +// Validate validates the campaign recipient +func (c *CampaignRecipient) Validate() error { + if err := validate.NullableFieldRequired("campaignID", c.CampaignID); err != nil { + return err + } + anonymizedAtErr := validate.NullableFieldRequired("anonymizedID", c.AnonymizedID) + recipientIDErr := validate.NullableFieldRequired("recipientID", c.RecipientID) + if anonymizedAtErr == nil && recipientIDErr == nil { + return recipientIDErr + } + if anonymizedAtErr != nil && recipientIDErr != nil { + return validate.WrapErrorWithField( + errors.New("AnonymizedID can not be set with recipientID"), + "recipientID", + ) + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (c *CampaignRecipient) ToDBMap() map[string]any { + m := map[string]any{} + if c.CancelledAt.IsSpecified() { + m["cancelled_at"] = nil + if v, err := c.CancelledAt.Get(); err == nil { + m["cancelled_at"] = utils.RFC3339UTC(v) + } + } + if c.SendAt.IsSpecified() { + m["send_at"] = nil + if v, err := c.SendAt.Get(); err == nil { + m["send_at"] = utils.RFC3339UTC(v) + } + } + if c.LastAttemptAt.IsSpecified() { + m["last_attempt_at"] = nil + if v, err := c.LastAttemptAt.Get(); err == nil { + m["last_attempt_at"] = utils.RFC3339UTC(v) + } + } + if c.SentAt.IsSpecified() { + m["sent_at"] = nil + if v, err := c.SentAt.Get(); err == nil { + m["sent_at"] = utils.RFC3339UTC(v) + } + } + if c.SelfManaged.IsSpecified() { + m["self_managed"] = nil + if v, err := c.SelfManaged.Get(); err == nil { + m["self_managed"] = v + } + } + if c.CampaignID.IsSpecified() { + m["campaign_id"] = nil + if v, err := c.CampaignID.Get(); err == nil { + m["campaign_id"] = v + } + } + if c.RecipientID.IsSpecified() { + m["recipient_id"] = nil + if v, err := c.RecipientID.Get(); err == nil { + m["recipient_id"] = v + } + } + if c.NotableEventID.IsSpecified() { + m["notable_event_id"] = nil + if v, err := c.NotableEventID.Get(); err == nil { + m["notable_event_id"] = v + } + } + + return m +} diff --git a/backend/model/campaignResultView.go b/backend/model/campaignResultView.go new file mode 100644 index 0000000..938e338 --- /dev/null +++ b/backend/model/campaignResultView.go @@ -0,0 +1,9 @@ +package model + +type CampaignResultView struct { + Recipients int64 `json:"recipients"` + EmailsSent int64 `json:"emailsSent"` + TrackingPixelLoaded int64 `json:"trackingPixelLoaded"` + WebsiteLoaded int64 `json:"clickedLink"` + 
SubmittedData int64 `json:"submittedData"` +} diff --git a/backend/model/campaignTemplate.go b/backend/model/campaignTemplate.go new file mode 100644 index 0000000..2107cf3 --- /dev/null +++ b/backend/model/campaignTemplate.go @@ -0,0 +1,187 @@ +package model + +import ( + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// CampaignTemplate is a campaign template +type CampaignTemplate struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + + Name nullable.Nullable[vo.String64] `json:"name"` + + DomainID nullable.Nullable[uuid.UUID] `json:"domainID"` + Domain *Domain `json:"domain"` + + BeforeLandingPageID nullable.Nullable[uuid.UUID] `json:"beforeLandingPageID"` + BeforeLandingePage *Page `json:"beforeLandingPage"` + + LandingPageID nullable.Nullable[uuid.UUID] `json:"landingPageID"` + LandingPage *Page `json:"landingPage"` + + AfterLandingPageID nullable.Nullable[uuid.UUID] `json:"afterLandingPageID"` + AfterLandingPage *Page `json:"afterLandingPage"` + + AfterLandingPageRedirectURL nullable.Nullable[vo.OptionalString255] `json:"afterLandingPageRedirectURL"` + + URLIdentifierID nullable.Nullable[*uuid.UUID] `json:"urlIdentifierID"` + URLIdentifier *Identifier `json:"urlIdentifier"` + + StateIdentifierID nullable.Nullable[*uuid.UUID] `json:"stateIdentifierID"` + StateIdentifier *Identifier `json:"stateIdentifier"` + + URLPath nullable.Nullable[vo.URLPath] `json:"urlPath"` + + EmailID nullable.Nullable[uuid.UUID] `json:"emailID"` + Email *Email `json:"email"` + + SMTPConfigurationID nullable.Nullable[uuid.UUID] `json:"smtpConfigurationID"` + SMTPConfiguration *SMTPConfiguration `json:"smtpConfiguration"` + + APISenderID nullable.Nullable[uuid.UUID] `json:"apiSenderID"` + APISender *APISender `json:"apiSender"` + + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Company *Company `json:"company"` + + IsUsable nullable.Nullable[bool] `json:"isUsable"` +} + +// Validate checks if the campaign template has a valid state +func (c *CampaignTemplate) Validate() error { + if err := validate.NullableFieldRequired("name", c.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("urlIdentifierID", c.URLIdentifierID); err != nil { + return err + } + if err := validate.NullableFieldRequired("stateIdentifierID", c.StateIdentifierID); err != nil { + return err + } + if a, err := c.URLIdentifierID.Get(); err == nil { + if b, err := c.StateIdentifierID.Get(); err == nil { + if a.String() == b.String() { + return errs.NewValidationError( + errors.New("URL and state identifier can not be the same"), + ) + } + } + } + if err := validate.NullableFieldRequired("urlPath", c.URLPath); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (c *CampaignTemplate) ToDBMap() map[string]any { + m := map[string]any{} + if c.Name.IsSpecified() { + m["name"] = nil + if name, err := c.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if c.DomainID.IsSpecified() { + if c.DomainID.IsNull() { + m["domain_id"] = nil + } else { + m["domain_id"] = c.DomainID.MustGet() + } + } + + if 
c.BeforeLandingPageID.IsSpecified() { + if c.BeforeLandingPageID.IsNull() { + m["before_landing_page_id"] = nil + } else { + m["before_landing_page_id"] = c.BeforeLandingPageID.MustGet() + } + } + + if c.LandingPageID.IsSpecified() { + if c.LandingPageID.IsNull() { + m["landing_page_id"] = nil + } else { + m["landing_page_id"] = c.LandingPageID.MustGet() + } + } + + if c.AfterLandingPageID.IsSpecified() { + if c.AfterLandingPageID.IsNull() { + m["after_landing_page_id"] = nil + } else { + m["after_landing_page_id"] = c.AfterLandingPageID.MustGet() + } + } + if c.AfterLandingPageRedirectURL.IsSpecified() { + if c.AfterLandingPageRedirectURL.IsNull() { + m["after_landing_page_redirect_url"] = nil + } else { + m["after_landing_page_redirect_url"] = c.AfterLandingPageRedirectURL.MustGet().String() + } + } + + if c.EmailID.IsSpecified() { + if c.EmailID.IsNull() { + m["email_id"] = nil + } else { + m["email_id"] = c.EmailID.MustGet() + } + } + if c.SMTPConfigurationID.IsSpecified() { + if c.SMTPConfigurationID.IsNull() { + m["smtp_configuration_id"] = nil + } else { + m["smtp_configuration_id"] = c.SMTPConfigurationID.MustGet() + } + } + if c.APISenderID.IsSpecified() { + if c.APISenderID.IsNull() { + m["api_sender_id"] = nil + } else { + m["api_sender_id"] = c.APISenderID.MustGet() + } + } + + if c.CompanyID.IsSpecified() { + if c.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = c.CompanyID.MustGet() + } + } + if v, err := c.URLIdentifierID.Get(); err == nil { + m["url_identifier_id"] = v + } + if v, err := c.StateIdentifierID.Get(); err == nil { + m["state_identifier_id"] = v + } + if v, err := c.URLPath.Get(); err == nil { + m["url_path"] = v.String() + } + + _, errDomain := c.DomainID.Get() + _, errSMTP := c.SMTPConfigurationID.Get() + _, errAPISender := c.APISenderID.Get() + _, errEmail := c.EmailID.Get() + _, errLandingPage := c.LandingPageID.Get() + + m["is_usable"] = errDomain == nil && + errEmail == nil && + errLandingPage == nil && + (errSMTP == nil || errAPISender == nil) + + return m +} diff --git a/backend/model/campaignsStatView.go b/backend/model/campaignsStatView.go new file mode 100644 index 0000000..3b5d645 --- /dev/null +++ b/backend/model/campaignsStatView.go @@ -0,0 +1,7 @@ +package model + +type CampaignsStatView struct { + Active int64 `json:"active"` + Upcoming int64 `json:"upcoming"` + Finished int64 `json:"finished"` +} diff --git a/backend/model/company.go b/backend/model/company.go new file mode 100644 index 0000000..a142713 --- /dev/null +++ b/backend/model/company.go @@ -0,0 +1,40 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Company is a company +type Company struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String64] `json:"name"` +} + +// Validate checks if the Company configuration with a valid state +func (c *Company) Validate() error { + if err := validate.NullableFieldRequired("name", c.Name); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (c *Company) ToDBMap() map[string]any { + m := map[string]any{} + if c.Name.IsSpecified() { + 
m["name"] = nil + if name, err := c.Name.Get(); err == nil { + m["name"] = name.String() + } + } + return m +} diff --git a/backend/model/domain.go b/backend/model/domain.go new file mode 100644 index 0000000..49324e7 --- /dev/null +++ b/backend/model/domain.go @@ -0,0 +1,166 @@ +package model + +import ( + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +type Domain struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String255] `json:"name"` + HostWebsite nullable.Nullable[bool] `json:"hostWebsite"` + ManagedTLS nullable.Nullable[bool] `json:"managedTLS"` + OwnManagedTLS nullable.Nullable[bool] `json:"ownManagedTLS"` + // private key + OwnManagedTLSKey nullable.Nullable[string] `json:"ownManagedTLSKey"` + // cert + OwnManagedTLSPem nullable.Nullable[string] `json:"ownManagedTLSPem"` + PageContent nullable.Nullable[vo.OptionalString1MB] `json:"pageContent"` + PageNotFoundContent nullable.Nullable[vo.OptionalString1MB] `json:"pageNotFoundContent"` + RedirectURL nullable.Nullable[vo.OptionalString1024] `json:"redirectURL"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Company *Company `json:"company"` +} + +// Validate checks if the Domain configuration with a valid state +func (d *Domain) Validate() error { + if err := validate.NullableFieldRequired("name", d.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("hostWebsite", d.HostWebsite); err != nil { + return err + } + if err := validate.NullableFieldRequired("managedTLS", d.ManagedTLS); err != nil { + return err + } + if err := validate.NullableFieldRequired("pageContent", d.PageContent); err != nil { + return err + } + if err := validate.NullableFieldRequired("pageNotFoundContent", d.PageNotFoundContent); err != nil { + return err + } + if err := validate.NullableFieldRequired("redirectURL", d.RedirectURL); err != nil { + return err + } + // + // + ownManagedTLS, err := d.OwnManagedTLS.Get() + ownManagedTLSSet := err == nil && ownManagedTLS + + // cant both have managed and own managed tls + if managedTLS, err := d.ManagedTLS.Get(); err == nil && managedTLS && ownManagedTLSSet { + return errs.NewValidationError(errors.New( + "Domain TLS can not both be managed and own managed", + )) + } + if ownManagedTLS { + // handle own managed ManagedTLS + ownManagedTLSKey, err := d.OwnManagedTLSKey.Get() + ownManagedTLSPem, err := d.OwnManagedTLSPem.Get() + ownManagedTLSKeyIsSet := err == nil && len(ownManagedTLSKey) > 0 + ownManagedTLSPemIsSet := err == nil && len(ownManagedTLSPem) > 0 + // both must be set, not one of + if (ownManagedTLSKeyIsSet && !ownManagedTLSPemIsSet) || + (!ownManagedTLSKeyIsSet && ownManagedTLSPemIsSet) { + return errs.NewValidationError(errors.New( + "Own managed TLS requires a private key (.key) and a certificate (.pem)", + )) + } + } + /* + // TODO hostWebsite vs redirectURL are mutually exclusive + hostWebsite := d.HostWebsite.MustGet() + redirectURL := d.RedirectURL.MustGet() + redirectURLLen := len(redirectURL.String()) + if hostWebsite && redirectURLLen > 0 { + return validate.WrapErrorWithField( + errors.New("both can not be set"), + "Host website and redirect url", + ) + } */ + return nil +} + +// ToDBMap converts the fields that can be stored or updated to 
a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (d *Domain) ToDBMap() map[string]any { + m := map[string]any{} + if d.Name.IsSpecified() { + m["name"] = nil + if name, err := d.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if d.HostWebsite.IsSpecified() { + m["host_website"] = nil + if hostWebsite, err := d.HostWebsite.Get(); err == nil { + m["host_website"] = hostWebsite + } + m["redirect_url"] = "" + } + if d.RedirectURL.IsSpecified() { + m["redirect_url"] = nil + if redirectURL, err := d.RedirectURL.Get(); err == nil { + m["redirect_url"] = redirectURL.String() + } + } + if d.PageContent.IsSpecified() { + m["page_content"] = nil + if staticPage, err := d.PageContent.Get(); err == nil { + m["page_content"] = staticPage.String() + } + } + if d.PageNotFoundContent.IsSpecified() { + m["page_not_found_content"] = nil + if staticNotFound, err := d.PageNotFoundContent.Get(); err == nil { + m["page_not_found_content"] = staticNotFound.String() + } + } + if d.CompanyID.IsSpecified() { + if d.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = d.CompanyID.MustGet() + } + } + if d.ManagedTLS.IsSpecified() { + m["managed_tls_certs"] = false + if d.ManagedTLS.IsNull() { + m["managed_tls_certs"] = nil + } else { + m["managed_tls_certs"] = d.ManagedTLS.MustGet() + } + } + if d.OwnManagedTLS.IsSpecified() { + m["own_managed_tls"] = false + if d.OwnManagedTLS.IsNull() { + m["own_managed_tls"] = nil + } else { + m["own_managed_tls"] = d.OwnManagedTLS.MustGet() + } + } + return m +} + +// DomainOverview is a subset of the domain as used as read-only +type DomainOverview struct { + ID uuid.UUID `json:"id,omitempty"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name string `json:"name"` + HostWebsite bool `json:"hostWebsite"` + ManagedTLS bool `json:"managedTLS"` + OwnManagedTLS bool `json:"ownManagedTLS"` + RedirectURL string `json:"redirectURL"` + CompanyID *uuid.UUID `json:"companyID"` +} diff --git a/backend/model/email.go b/backend/model/email.go new file mode 100644 index 0000000..a5c285e --- /dev/null +++ b/backend/model/email.go @@ -0,0 +1,134 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Email is a e-mail +type Email struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String64] `json:"name"` + MailEnvelopeFrom nullable.Nullable[vo.MailEnvelopeFrom] `json:"mailEnvelopeFrom"` // Bounce / Return-Path + MailHeaderFrom nullable.Nullable[vo.Email] `json:"mailHeaderFrom"` + MailHeaderSubject nullable.Nullable[vo.OptionalString255] `json:"mailHeaderSubject"` + Content nullable.Nullable[vo.OptionalString1MB] `json:"content"` + AddTrackingPixel nullable.Nullable[bool] `json:"addTrackingPixel"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + + Attachments []*Attachment `json:"attachments"` + Company *Company `json:"company"` +} + +// Validate checks if the mail has a valid state +func (m *Email) Validate() error { + if err := validate.NullableFieldRequired("name", m.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("mailEnvelopeFrom", m.MailEnvelopeFrom); err != nil { + return err + } + if err := 
validate.NullableFieldRequired("mailHeaderSubject", m.MailHeaderSubject); err != nil { + return err + } + if err := validate.NullableFieldRequired("addTrackingPixel", m.MailHeaderFrom); err != nil { + return err + } + if err := validate.NullableFieldRequired("Content", m.Content); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (s *Email) ToDBMap() map[string]any { + m := map[string]any{} + if s.Name.IsSpecified() { + m["name"] = nil + if name, err := s.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if s.MailEnvelopeFrom.IsSpecified() { + m["mail_from"] = nil + if envelopeFrom, err := s.MailEnvelopeFrom.Get(); err == nil { + m["mail_from"] = envelopeFrom.String() + } + } + if s.MailHeaderFrom.IsSpecified() { + m["from"] = nil + if headerFrom, err := s.MailHeaderFrom.Get(); err == nil { + m["from"] = headerFrom.String() + } + } + if s.MailHeaderSubject.IsSpecified() { + m["subject"] = nil + if headerSubject, err := s.MailHeaderSubject.Get(); err == nil { + m["subject"] = headerSubject.String() + } + } + if s.Content.IsSpecified() { + m["content"] = nil + if content, err := s.Content.Get(); err == nil { + m["content"] = content.String() + } + } + if s.AddTrackingPixel.IsSpecified() { + m["add_tracking_pixel"] = nil + if addTrackingPixel, err := s.AddTrackingPixel.Get(); err == nil { + m["add_tracking_pixel"] = addTrackingPixel + } + } + if s.CompanyID.IsSpecified() { + if s.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = s.CompanyID.MustGet() + } + } + return m +} + +func NewEmailExample() *Email { + return &Email{ + Name: nullable.NewNullableWithValue( + *vo.NewString64Must("ExampleEmail"), + ), + MailEnvelopeFrom: nullable.NewNullableWithValue( + *vo.NewMailEnvelopeFromMust("sender@example.test"), + ), + MailHeaderFrom: nullable.NewNullableWithValue( + *vo.NewEmailMust("Mallory "), + ), + MailHeaderSubject: nullable.NewNullableWithValue( + *vo.NewOptionalString255Must("SubjectLine"), + ), + Content: nullable.NewNullableWithValue( + *vo.NewOptionalString1MBMust("Content"), + ), + AddTrackingPixel: nullable.NewNullableWithValue(true), + } +} + +// EmailOverview is a e-mail model without content and attachments +type EmailOverview struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String64] `json:"name"` + MailEnvelopeFrom nullable.Nullable[vo.MailEnvelopeFrom] `json:"mailEnvelopeFrom"` // Bounce / Return-Path + MailHeaderFrom nullable.Nullable[vo.Email] `json:"mailHeaderFrom"` + MailHeaderSubject nullable.Nullable[vo.OptionalString255] `json:"mailHeaderSubject"` + AddTrackingPixel nullable.Nullable[bool] `json:"addTrackingPixel"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + + Company *Company `json:"company"` +} diff --git a/backend/model/identifier.go b/backend/model/identifier.go new file mode 100644 index 0000000..bdeddd9 --- /dev/null +++ b/backend/model/identifier.go @@ -0,0 +1,27 @@ +package model + +import ( + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" +) + +type Identifier struct { + ID nullable.Nullable[*uuid.UUID] `json:"id"` + Name nullable.Nullable[string] `json:"name"` +} + +func (i *Identifier) Validate() error { + 
if err := validate.NullableFieldRequired("name", i.Name); err != nil { + return err + } + return nil +} + +func (i *Identifier) ToDBMap() map[string]any { + m := make(map[string]any) + if v, err := i.Name.Get(); err == nil { + m["name"] = v + } + return m +} diff --git a/backend/model/option.go b/backend/model/option.go new file mode 100644 index 0000000..6f9d78d --- /dev/null +++ b/backend/model/option.go @@ -0,0 +1,14 @@ +package model + +import ( + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/vo" +) + +// Option is an Option +type Option struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + Key vo.String64 `json:"key"` + Value vo.OptionalString1MB `json:"value"` +} diff --git a/backend/model/page.go b/backend/model/page.go new file mode 100644 index 0000000..ce6348d --- /dev/null +++ b/backend/model/page.go @@ -0,0 +1,60 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Page is a Page +type Page struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Name nullable.Nullable[vo.String64] `json:"name"` + Content nullable.Nullable[vo.OptionalString1MB] `json:"content"` + + Company *Company `json:"-"` +} + +// Validate checks if the page has a valid state +func (p *Page) Validate() error { + if err := validate.NullableFieldRequired("name", p.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("content", p.Content); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (p *Page) ToDBMap() map[string]any { + m := map[string]any{} + if p.Name.IsSpecified() { + m["name"] = nil + if name, err := p.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if p.Content.IsSpecified() { + m["content"] = nil + if content, err := p.Content.Get(); err == nil { + m["content"] = content.String() + } + } + if p.CompanyID.IsSpecified() { + if p.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = p.CompanyID.MustGet() + } + } + return m +} diff --git a/backend/model/recipient.go b/backend/model/recipient.go new file mode 100644 index 0000000..c713949 --- /dev/null +++ b/backend/model/recipient.go @@ -0,0 +1,221 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Recipient is a Recipient +type Recipient struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Email nullable.Nullable[vo.Email] `json:"email"` + Phone nullable.Nullable[vo.OptionalString127] `json:"phone"` + ExtraIdentifier nullable.Nullable[vo.OptionalString127] `json:"extraIdentifier"` + FirstName nullable.Nullable[vo.OptionalString127] `json:"firstName"` + LastName nullable.Nullable[vo.OptionalString127] `json:"lastName"` + Position nullable.Nullable[vo.OptionalString127] `json:"position"` + Department nullable.Nullable[vo.OptionalString127] `json:"department"` + City 
nullable.Nullable[vo.OptionalString127] `json:"city"` + Country nullable.Nullable[vo.OptionalString127] `json:"country"` + Misc nullable.Nullable[vo.OptionalString127] `json:"misc"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + + Company *Company `json:"-"` + Groups nullable.Nullable[[]*RecipientGroup] `json:"groups"` +} + +// Validate checks if the recipient has a valid state +func (r *Recipient) Validate() error { + if err := validate.NullableFieldRequired("email", r.Email); err != nil { + return err + } + return nil +} + +// NullifyEmptyOptionals sets empty values to a nullable null, so they are not overwritten +func (r *Recipient) NullifyEmptyOptionals() { + if r.Phone.IsSpecified() && !r.Phone.IsNull() && r.Phone.MustGet().String() == "" { + r.Phone.SetNull() + } + if r.ExtraIdentifier.IsSpecified() && !r.ExtraIdentifier.IsNull() && r.ExtraIdentifier.MustGet().String() == "" { + r.ExtraIdentifier.SetNull() + } + if r.FirstName.IsSpecified() && !r.FirstName.IsNull() && r.FirstName.MustGet().String() == "" { + r.FirstName.SetNull() + } + + if r.LastName.IsSpecified() && !r.LastName.IsNull() && r.LastName.MustGet().String() == "" { + r.LastName.SetNull() + } + if r.Position.IsSpecified() && !r.Position.IsNull() && r.Position.MustGet().String() == "" { + r.Position.SetNull() + } + if r.Department.IsSpecified() && !r.Department.IsNull() && r.Department.MustGet().String() == "" { + r.Department.SetNull() + } + if r.City.IsSpecified() && !r.City.IsNull() && r.City.MustGet().String() == "" { + r.City.SetNull() + } + if r.Country.IsSpecified() && !r.Country.IsNull() && r.Country.MustGet().String() == "" { + r.Country.SetNull() + } + if r.Misc.IsSpecified() && !r.Misc.IsNull() && r.Misc.MustGet().String() == "" { + r.Misc.SetNull() + } +} + +// EmptyStringNulledOptionals sets nulled optional values to a empty string or zero value. 
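+// (the inverse of NullifyEmptyOptionals above: optional fields that were
+// explicitly nulled are given empty string values again)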
+func (r *Recipient) EmptyStringNulledOptionals() { + if r.Phone.IsSpecified() && r.Phone.IsNull() { + r.Phone.Set(*vo.NewOptionalString127Must("")) + } + if r.ExtraIdentifier.IsSpecified() && r.ExtraIdentifier.IsNull() { + r.ExtraIdentifier.Set(*vo.NewOptionalString127Must("")) + } + if r.FirstName.IsSpecified() && r.FirstName.IsNull() { + r.FirstName.Set(*vo.NewOptionalString127Must("")) + } + if r.LastName.IsSpecified() && r.LastName.IsNull() { + r.LastName.Set(*vo.NewOptionalString127Must("")) + } + if r.Position.IsSpecified() && r.Position.IsNull() { + r.Position.Set(*vo.NewOptionalString127Must("")) + } + if r.Department.IsSpecified() && r.Department.IsNull() { + r.Department.Set(*vo.NewOptionalString127Must("")) + } + if r.City.IsSpecified() && r.City.IsNull() { + r.City.Set(*vo.NewOptionalString127Must("")) + } + if r.Country.IsSpecified() && r.Country.IsNull() { + r.Country.Set(*vo.NewOptionalString127Must("")) + } + if r.Misc.IsSpecified() && r.Misc.IsNull() { + r.Misc.Set(*vo.NewOptionalString127Must("")) + } +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (r *Recipient) ToDBMap() map[string]any { + m := map[string]any{} + if r.Email.IsSpecified() { + m["email"] = nil + if email, err := r.Email.Get(); err == nil { + if email.String() == "" { + m["email"] = nil // due to the unique constraint + } else { + m["email"] = email.String() + } + } + } + if r.Phone.IsSpecified() { + m["phone"] = nil + if phone, err := r.Phone.Get(); err == nil { + if phone.String() == "" { + m["phone"] = nil // due to the unique constraint + } else { + m["phone"] = phone.String() + } + } + } + if r.ExtraIdentifier.IsSpecified() { + m["extra_identifier"] = nil + if extraIdentifier, err := r.ExtraIdentifier.Get(); err == nil { + if extraIdentifier.String() == "" { + m["extra_identifier"] = nil // due to the unique constraint + } else { + m["extra_identifier"] = extraIdentifier.String() + } + } + } + if r.FirstName.IsSpecified() { + m["first_name"] = nil + if firstName, err := r.FirstName.Get(); err == nil { + m["first_name"] = firstName.String() + } + } + if r.LastName.IsSpecified() { + m["last_name"] = nil + if lastName, err := r.LastName.Get(); err == nil { + m["last_name"] = lastName.String() + } + } + if r.Position.IsSpecified() { + m["position"] = nil + if position, err := r.Position.Get(); err == nil { + m["position"] = position.String() + } + } + if r.Department.IsSpecified() { + m["department"] = nil + if department, err := r.Department.Get(); err == nil { + m["department"] = department.String() + } + } + if r.City.IsSpecified() { + m["city"] = nil + if city, err := r.City.Get(); err == nil { + m["city"] = city.String() + } + } + if r.Country.IsSpecified() { + m["country"] = nil + if country, err := r.Country.Get(); err == nil { + m["country"] = country.String() + } + } + if r.Misc.IsSpecified() { + m["misc"] = nil + if misc, err := r.Misc.Get(); err == nil { + m["misc"] = misc.String() + } + } + if r.CompanyID.IsSpecified() { + if r.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = r.CompanyID.MustGet() + } + } + return m +} + +func NewRecipientExample() *Recipient { + return &Recipient{ + Email: nullable.NewNullableWithValue( + *vo.NewEmailMust("Rick "), + ), + Phone: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("1234567890"), + ), + ExtraIdentifier: 
nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("ExtraIdentifier"), + ), + FirstName: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Rick"), + ), + LastName: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Xanders"), + ), + Position: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("CEO"), + ), + Department: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("IT"), + ), + City: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Fredericia"), + ), + Country: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Denmark"), + ), + } +} diff --git a/backend/model/recipientCampaignEventView.go b/backend/model/recipientCampaignEventView.go new file mode 100644 index 0000000..97a1e24 --- /dev/null +++ b/backend/model/recipientCampaignEventView.go @@ -0,0 +1,9 @@ +package model + +type RecipientCampaignEvent struct { + CampaignEvent + + // event name + Name string `json:"name"` + CampaignName string `json:"campaignName"` +} diff --git a/backend/model/recipientCampaignStatsView.go b/backend/model/recipientCampaignStatsView.go new file mode 100644 index 0000000..d40d975 --- /dev/null +++ b/backend/model/recipientCampaignStatsView.go @@ -0,0 +1,10 @@ +package model + +type RecipientCampaignStatsView struct { + CampaignsParticiated int64 `json:"campaignsParticiated"` + CampaignsTrackingPixelLoaded int64 `json:"campaignsTrackingPixelLoaded"` + CampaignsPhishingPageLoaded int64 `json:"campaignsPhishingPageLoaded"` + CampaignsDataSubmitted int64 `json:"campaignsDataSubmitted"` + RepeatLinkClicks int64 `json:"repeatLinkClicks"` + RepeatSubmissions int64 `json:"repeatSubmissions"` +} diff --git a/backend/model/recipientGroup.go b/backend/model/recipientGroup.go new file mode 100644 index 0000000..6db59d0 --- /dev/null +++ b/backend/model/recipientGroup.go @@ -0,0 +1,55 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +const RECIPIENT_COUNT_NOT_LOADED = int64(-1) +const RECIPIENT_GROUP_COUNT_NOT_LOADED = int64(-1) + +// RecipientGroup is an entity for recipient group +type RecipientGroup struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String127] `json:"name"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + + Recipients []*Recipient `json:"-"` + IsRecipientsLoaded bool `json:"-"` + RecipientCount nullable.Nullable[int64] `json:"recipientCount"` + IsRecipientCountLoaded bool `json:"-"` + Company *Company `json:"-"` +} + +// Validate checks if the recipient group has a valid state +func (rg *RecipientGroup) Validate() error { + if err := validate.NullableFieldRequired("name", rg.Name); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (rg *RecipientGroup) ToDBMap() map[string]any { + m := map[string]any{} + if rg.Name.IsSpecified() { + name := rg.Name.MustGet() + m["name"] = name.String() + } + if rg.CompanyID.IsSpecified() { + if rg.CompanyID.IsNull() { + m["company_id"] = nil + } else { + m["company_id"] = rg.CompanyID.MustGet() + } + } + return m +} diff --git a/backend/model/recipientGroupRecipient.go 
b/backend/model/recipientGroupRecipient.go new file mode 100644 index 0000000..2320a8c --- /dev/null +++ b/backend/model/recipientGroupRecipient.go @@ -0,0 +1,9 @@ +package model + +import "github.com/google/uuid" + +type RecipientGroupRecipient struct { + ID *uuid.UUID + RecipientID *uuid.UUID + RecipientGroupID *uuid.UUID +} diff --git a/backend/model/recipientView.go b/backend/model/recipientView.go new file mode 100644 index 0000000..e807523 --- /dev/null +++ b/backend/model/recipientView.go @@ -0,0 +1,15 @@ +package model + +// RecipientView extends Recipient with additional presentation fields +type RecipientView struct { + *Recipient // Embed the base Recipient model + IsRepeatOffender bool `json:"isRepeatOffender"` +} + +// NewRecipientView creates a RecipientView from a Recipient +func NewRecipientView(r *Recipient) *RecipientView { + return &RecipientView{ + Recipient: r, + IsRepeatOffender: false, + } +} diff --git a/backend/model/result.go b/backend/model/result.go new file mode 100644 index 0000000..3476d35 --- /dev/null +++ b/backend/model/result.go @@ -0,0 +1,21 @@ +package model + +type Result[T any] struct { + Rows []*T `json:"rows"` + HasNextPage bool `json:"hasNextPage"` +} + +func NewResult[T any](rows []*T) *Result[T] { + return &Result[T]{ + Rows: rows, + HasNextPage: false, + } +} + +func NewEmptyResult[T any]() *Result[T] { + t := []*T{} + return &Result[T]{ + Rows: t, + HasNextPage: false, + } +} diff --git a/backend/model/role.go b/backend/model/role.go new file mode 100644 index 0000000..a03d816 --- /dev/null +++ b/backend/model/role.go @@ -0,0 +1,69 @@ +package model + +import ( + "fmt" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" +) + +// Role is user role and defines the permissions of the user +type Role struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +// Validate checks if the role has a valid state +func (r *Role) Validate() error { + if err := validateRoleName(r.Name); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (r *Role) ToDBMap() map[string]any { + m := map[string]any{} + if r.Name != "" { + m["name"] = r.Name + } + return m +} + +func (r *Role) IsAuthorized(permission string) bool { + perms := r.Permissions() + for _, perm := range perms { + if perm == permission { + return true + } + } + return false +} + +// Permissions gets the permissions of the role +func (r *Role) Permissions() []string { + perms, ok := data.RolePermissions[r.Name] + if !ok { + return []string{} + } + return perms +} + +// IsSuperAdministrator checks if the role is a super administrator +func (r *Role) IsSuperAdministrator() bool { + return r.Name == data.RoleSuperAdministrator +} + +func validateRoleName(name string) error { + // ensure only valid role names are used + switch name { + case data.RoleSystem: + case data.RoleSuperAdministrator: + case data.RoleCompanyUser: + default: + return fmt.Errorf("invalid role name: %s", name) + } + return nil +} diff --git a/backend/model/session.go b/backend/model/session.go new file mode 100644 index 0000000..96cdaa5 --- /dev/null +++ b/backend/model/session.go @@ -0,0 +1,81 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" +) + +// todo move this to a 
global place like data or a config of sorts +const ( + SessionIdleTimeout = 24 * time.Hour + SessionMaxAgeAt = 24 * 3 * time.Hour +) + +// used in runtime for API requests +// api session is only for the single request, more like a api session contex +type APISession struct { + IP string + UserID *uuid.UUID +} + +// Session reprensents a user session +// no Validate or ToDBMap as it is never created from user input +type Session struct { + ID *uuid.UUID `json:"id,omitempty"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + ExpiresAt *time.Time `json:"expiresAt"` + MaxAgeAt *time.Time `json:"maxAgeAt"` + IP string `json:"ip"` + User *User `json:"user"` + IsUserLoaded bool `json:"-"` + IsAPITokenRequest bool +} + +// NewSystemSession creates a new system session +func NewSystemSession() (*Session, error) { + id, err := uuid.Parse(data.SystemSessionID) + if err != nil { + return nil, errs.Wrap(err) + } + now := time.Now() + longTimeFromNow := now.Add(time.Duration(420 * time.Now().Year())).UTC() + expiresAt := &longTimeFromNow + maxAgeAt := &longTimeFromNow + user, err := NewSystemUser() + if err != nil { + return nil, errs.Wrap(err) + } + return &Session{ + ID: &id, + ExpiresAt: expiresAt, + MaxAgeAt: maxAgeAt, + IP: "127.0.0.1", + User: user, + IsUserLoaded: true, + }, nil + +} + +// Renew updates the session +func (s *Session) Renew(lease time.Duration) { + now := time.Now().UTC() + expiresAt := now.Add(lease) + s.ExpiresAt = &expiresAt +} + +// IsExpired returns true if the session is expired +func (s *Session) IsExpired() bool { + // is total lifetime over max lifetime? + if time.Now().After(*s.MaxAgeAt) { + return true + } + // is idle timeout over? + if time.Now().After(*s.ExpiresAt) { + return true + } + return false +} diff --git a/backend/model/smtpConfiguration.go b/backend/model/smtpConfiguration.go new file mode 100644 index 0000000..61a1898 --- /dev/null +++ b/backend/model/smtpConfiguration.go @@ -0,0 +1,141 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// SMTPConfiguration is a configuration for sending mails +type SMTPConfiguration struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String127] `json:"name"` + Host nullable.Nullable[vo.String255] `json:"host"` + Port nullable.Nullable[vo.Port] `json:"port"` + Username nullable.Nullable[vo.OptionalString255] `json:"username"` + Password nullable.Nullable[vo.OptionalString255] `json:"password"` + IgnoreCertErrors nullable.Nullable[bool] `json:"ignoreCertErrors"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Company *Company `json:"company"` + Headers []*SMTPHeader `json:"headers"` +} + +// Validate checks if the SMTP configuration has a valid state +func (s *SMTPConfiguration) Validate() error { + if err := validate.NullableFieldRequired("name", s.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("host", s.Host); err != nil { + return err + } + if err := validate.NullableFieldRequired("port", s.Port); err != nil { + return err + } + if err := validate.NullableFieldRequired("username", s.Username); err != nil { + return err + } + if err := validate.NullableFieldRequired("password", s.Password); err != nil { + return err + } + if err := 
validate.NullableFieldRequired("ignoreCertErrors", s.IgnoreCertErrors); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (s *SMTPConfiguration) ToDBMap() map[string]any { + m := map[string]any{} + if s.Name.IsSpecified() { + m["name"] = nil + if name, err := s.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if s.Host.IsSpecified() { + m["host"] = nil + if host, err := s.Host.Get(); err == nil { + m["host"] = host.String() + } + } + if s.Port.IsSpecified() { + m["port"] = nil + if port, err := s.Port.Get(); err == nil { + m["port"] = port.Uint16() + } + } + if s.Username.IsSpecified() { + m["username"] = nil + if username, err := s.Username.Get(); err == nil { + m["username"] = username.String() + } + } + if s.Password.IsSpecified() { + m["password"] = nil + if password, err := s.Password.Get(); err == nil { + m["password"] = password.String() + } + } + if s.IgnoreCertErrors.IsSpecified() { + m["ignore_cert_errors"] = nil + if ignoreCertErrors, err := s.IgnoreCertErrors.Get(); err == nil { + m["ignore_cert_errors"] = ignoreCertErrors + } + } + if v, err := s.CompanyID.Get(); err == nil { + m["company_id"] = v.String() + } + return m +} + +// SMTPHeader is a header for a specific SMTP configuration +type SMTPHeader struct { + ID uuid.UUID `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + SmtpID nullable.Nullable[uuid.UUID] `json:"smtpID"` + Key nullable.Nullable[vo.String127] `json:"key"` + Value nullable.Nullable[vo.String255] `json:"value"` +} + +func (s *SMTPHeader) Validate() error { + if err := validate.NullableFieldRequired("smtpID", s.SmtpID); err != nil { + return err + } + if err := validate.NullableFieldRequired("key", s.Key); err != nil { + return err + } + if err := validate.NullableFieldRequired("value", s.Value); err != nil { + return err + } + return nil +} + +func (s *SMTPHeader) ToDBMap() map[string]interface{} { + m := map[string]interface{}{} + if s.SmtpID.IsSpecified() { + if smtpID, err := s.SmtpID.Get(); err == nil { + m["smtp_configuration_id"] = smtpID.String() + } + } + if s.Key.IsSpecified() { + m["key"] = nil + if key, err := s.Key.Get(); err == nil { + m["key"] = key.String() + } + } + if s.Value.IsSpecified() { + m["value"] = nil + if value, err := s.Value.Get(); err == nil { + m["value"] = value.String() + } + } + return m +} diff --git a/backend/model/ssoOption.go b/backend/model/ssoOption.go new file mode 100644 index 0000000..89dddc2 --- /dev/null +++ b/backend/model/ssoOption.go @@ -0,0 +1,80 @@ +package model + +import ( + "encoding/json" + "fmt" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +type SSOOption struct { + Enabled bool `json:"enabled"` + ClientID vo.OptionalString64 `json:"clientID"` + TenantID vo.OptionalString64 `json:"tenantID"` + ClientSecret vo.OptionalString1024 `json:"clientSecret"` + RedirectURL vo.OptionalString1024 `json:"redirectURL"` +} + +func NewSSOOptionDefault() *SSOOption { + return &SSOOption{ + Enabled: false, + ClientID: *vo.NewEmptyOptionalString64(), + TenantID: *vo.NewEmptyOptionalString64(), + ClientSecret: *vo.NewEmptyOptionalString1024(), + RedirectURL: 
*vo.NewEmptyOptionalString1024(), + } +} + +func NewSSOOptionFromJSON(jsonData []byte) (*SSOOption, error) { + option := &SSOOption{} + err := json.Unmarshal(jsonData, option) + if err != nil { + return nil, validate.WrapErrorWithField( + errs.NewValidationError( + errors.New("invalid format"), + ), + "Option", + ) + } + return option, nil +} + +func NewSSOOptionFromOption(option *Option) (*SSOOption, error) { + if option == nil { + return nil, fmt.Errorf("option cannot be nil") + } + ssooption, err := NewSSOOptionFromJSON([]byte(option.Value.String())) + if err != nil { + return nil, validate.WrapErrorWithField( + errs.NewValidationError( + errors.New("invalid format"), + ), + "SSOOption", + ) + } + return ssooption, nil +} + +func (l *SSOOption) ToJSON() ([]byte, error) { + return json.Marshal(l) +} + +func (l *SSOOption) ToOption() (*Option, error) { + json, err := l.ToJSON() + if err != nil { + return nil, errs.Wrap(err) + } + str, err := vo.NewOptionalString1MB(string(json)) + if err != nil { + return nil, errs.Wrap(err) + } + return &Option{ + Key: *vo.NewString64Must(data.OptionKeyAdminSSOLogin), + Value: *str, + }, nil +} diff --git a/backend/model/user.go b/backend/model/user.go new file mode 100644 index 0000000..3925ffd --- /dev/null +++ b/backend/model/user.go @@ -0,0 +1,153 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +const SYSTEM_USER_ID = "3eb19071-fbbb-4736-9991-02ba532a7849" + +// User is a user of the system, including the company and role +type User struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.UserFullname] `json:"name"` + Username nullable.Nullable[vo.Username] `json:"username"` + Email nullable.Nullable[vo.Email] `json:"email"` + RequirePasswordRenew nullable.Nullable[bool] `json:"requirePasswordRenew"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Company *Company `json:"company"` + RoleID nullable.Nullable[uuid.UUID] `json:"roleID"` + Role *Role `json:"role"` + SSOID nullable.Nullable[string] `json:"ssoID"` + // apiKey is only get/set externally from this and never output except when created +} + +// Validate checks if the user has a valid state +func (u *User) Validate() error { + if err := validate.NullableFieldRequired("name", u.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("username", u.Username); err != nil { + return err + } + if err := validate.NullableFieldRequired("email", u.Email); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (u *User) ToDBMap() map[string]any { + m := map[string]any{} + if u.Name.IsSpecified() { + m["name"] = nil + if name, err := u.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if u.Username.IsSpecified() { + m["username"] = nil + if username, err := u.Username.Get(); err == nil { + m["username"] = username.String() + } + } + if u.Email.IsSpecified() { + m["email"] = nil + if email, err := u.Email.Get(); err == nil { + m["email"] = email.String() + } + } + if u.RequirePasswordRenew.IsSpecified() { + m["require_password_renew"] = 
nil + if requirePasswordRenew, err := u.RequirePasswordRenew.Get(); err == nil { + m["require_password_renew"] = requirePasswordRenew + } + } + if u.CompanyID.IsSpecified() { + m["company_id"] = nil + if companyID, err := u.CompanyID.Get(); err == nil { + m["company_id"] = companyID + } + } + if u.RoleID.IsSpecified() { + m["role_id"] = nil + if roleID, err := u.RoleID.Get(); err == nil { + m["role_id"] = roleID + } + } + + if u.SSOID.IsSpecified() { + m["sso_id"] = nil + if ssoID, err := u.SSOID.Get(); err == nil { + m["sso_id"] = ssoID + } + } + return m +} + +// UserUpsertRequest is a request for creating a new user +type UserUpsertRequest struct { + Username vo.Username `json:"username"` + Password vo.ReasonableLengthPassword `json:"password"` + Email vo.Email `json:"email"` + Fullname vo.UserFullname `json:"fullname"` +} + +// UserChangeEmailRequest is a request for changing the email of a user +type UserChangeEmailRequest struct { + Email vo.Email `json:"email"` +} + +// UserChangeFullnameRequest is the change fullname request +type UserChangeFullnameRequest struct { + NewFullname vo.UserFullname `json:"fullname"` +} + +type InvalidateAllSessionRequest struct { + UserID *uuid.UUID `json:"userID"` +} + +// UserChangePasswordRequest is a request for changing password +type UserChangePasswordRequest struct { + CurrentPassword vo.ReasonableLengthPassword `json:"currentPassword" binding:"required"` + NewPassword vo.ReasonableLengthPassword `json:"newPassword" binding:"required"` +} + +// UserChangeUsernameOnLoggedInRequest is the change username request +type UserChangeUsernameOnLoggedInRequest struct { + NewUsername vo.Username `json:"username"` +} + +// NewSystemUser creates a new user which is used for internal system actions and +// cannot be used to log in or by a human.
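// A hedged usage sketch (the call site is hypothetical; within this diff the
// real consumer is NewSystemSession in session.go):
//
//	user, err := NewSystemUser()
//	if err != nil {
//		return err
//	}
//	// user.Role.Name is data.RoleSystem, so authorization checks resolve
//	// against data.RolePermissions[data.RoleSystem] rather than a human role.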
+func NewSystemUser() (*User, error) { + role := &Role{ + Name: data.RoleSystem, + } + id := uuid.MustParse(SYSTEM_USER_ID) + return &User{ + ID: nullable.NewNullableWithValue(id), + Name: nullable.NewNullableWithValue(*vo.NewUserFullnameMust("system")), + Username: nullable.NewNullableWithValue(*vo.NewUsernameMust("system")), + Email: nullable.NewNullableWithValue( + *vo.NewEmailMust("system@example.com"), + ), + RequirePasswordRenew: nullable.NewNullableWithValue(false), + Company: nil, + Role: role, + }, nil +} + +type APIUser struct { + APIKeyHash [32]byte + ID *uuid.UUID +} diff --git a/backend/model/webhook.go b/backend/model/webhook.go new file mode 100644 index 0000000..6b07ad1 --- /dev/null +++ b/backend/model/webhook.go @@ -0,0 +1,61 @@ +package model + +import ( + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Webhook is a gorm data model for webhooks +type Webhook struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Name nullable.Nullable[vo.String127] `json:"name"` + URL nullable.Nullable[vo.String1024] `json:"url"` + Secret nullable.Nullable[vo.OptionalString1024] `json:"secret"` +} + +// Validate runs the validations for this struct +func (w *Webhook) Validate() error { + if err := validate.NullableFieldRequired("name", w.Name); err != nil { + return err + } + if err := validate.NullableFieldRequired("url", w.URL); err != nil { + return err + } + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (w *Webhook) ToDBMap() map[string]any { + m := map[string]any{} + if w.Name.IsSpecified() { + m["name"] = nil + if name, err := w.Name.Get(); err == nil { + m["name"] = name.String() + } + } + if w.URL.IsSpecified() { + m["url"] = nil + if url, err := w.URL.Get(); err == nil { + m["url"] = url.String() + } + } + if w.Secret.IsSpecified() { + m["secret"] = nil + if secret, err := w.Secret.Get(); err == nil { + m["secret"] = secret.String() + } + } + if v, err := w.CompanyID.Get(); err == nil { + m["company_id"] = v.String() + } + return m +} diff --git a/backend/password/hashing.go b/backend/password/hashing.go new file mode 100644 index 0000000..3bda4f4 --- /dev/null +++ b/backend/password/hashing.go @@ -0,0 +1,143 @@ +package password + +import ( + "crypto/subtle" + "encoding/base64" + "fmt" + "strconv" + "strings" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/random" + "golang.org/x/crypto/argon2" +) + +const ( + // DummyHash is used for negating timing attacks on login. + // If the user is not found, this hash is used to compare the password against. + // This will ensure that the response time is closer to the response time when the user is found. + // If the DefaultArgon* values are changed, this value must be updated.
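// A hedged sketch of the intended login pattern (userRepo, GetByUsername and
// lookupHash are hypothetical; NewArgon2Verifier and Verify are defined below):
//
//	storedHash := DummyHash
//	user, err := userRepo.GetByUsername(ctx, username)
//	if err == nil {
//		storedHash = lookupHash(user) // hypothetical helper returning the stored encoded hash
//	}
//	ok, verr := NewArgon2Verifier().Verify(submitted, storedHash)
//	// comparable argon2 work is done whether or not the user exists
//	if err != nil || verr != nil || !ok {
//		// reject the login attempt
//	}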
+ DummyHash = "argon2id$v=19$m=65536,t=2,p=1$yUvXmqefdwzTurx0pMH3i4NLaHJ57sdkIvrHtWwuh7I$rWsAgfbeXsNzU5+CTtJ5oelaS+YDlgO3UTqna/jZFskWZAojCGjuAV8KYHHrztYy9/FbFNsdvyOrujNzqGFCWQ" +) + +const ( + DefaultArgon2Memory = 64 * 1024 + DefaultArgon2Iterations = 2 + DefaultArgon2Parallelism = 1 + DefaultArgon2KeyLen = 64 +) + +// Argon2Hasher is a hasher for password. +type Argon2Hasher struct { + memory uint32 + iterations uint32 + parallelism uint8 + keyLen uint32 +} + +// NewHasher creates a new hasher. +func NewHasher(memory uint32, iterations uint32, parallelism uint8, keyLen uint32) *Argon2Hasher { + return &Argon2Hasher{ + memory: memory, + iterations: iterations, + parallelism: parallelism, + keyLen: keyLen, + } +} + +// NewHasherWithDefaultValues creates a new hasher with default values. +func NewHasherWithDefaultValues() *Argon2Hasher { + return &Argon2Hasher{ + memory: DefaultArgon2Memory, + iterations: DefaultArgon2Iterations, + parallelism: DefaultArgon2Parallelism, + keyLen: DefaultArgon2KeyLen, + } +} + +// Hash hashes the password using Argon2. +func (h *Argon2Hasher) Hash(password string) (string, error) { + saltBytes, err := random.GenerateRandomBytes(32) + if err != nil { + return "", errs.Wrap(err) + } + passwordBytes := []byte(password) + hashBytes := argon2.IDKey(passwordBytes, saltBytes, h.iterations, h.memory, h.parallelism, h.keyLen) + hashString := fmt.Sprintf("argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s", + argon2.Version, h.memory, h.iterations, h.parallelism, + base64.RawStdEncoding.EncodeToString(saltBytes), + base64.RawStdEncoding.EncodeToString(hashBytes)) + return hashString, nil +} + +type Argon2Params struct { + Time uint32 + Memory uint32 + Parallelism uint8 + KeyLen uint32 +} +type Argon2Verifier struct{} + +func NewArgon2Verifier() *Argon2Verifier { + return &Argon2Verifier{} +} + +// Verify verifies a password against an Argon2 hash. +func (v *Argon2Verifier) Verify(password string, encodedHash string) (bool, error) { + params, salt, hash, err := decodeHash(encodedHash) + if err != nil { + return false, errs.Wrap(err) + } + computedHash := argon2.IDKey([]byte(password), salt, params.Time, params.Memory, params.Parallelism, params.KeyLen) + return subtle.ConstantTimeCompare(hash, computedHash) == 1, nil +} + +// decodeHash decodes an Argon2 hash string. 
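// For orientation, a hedged breakdown of the format produced by Hash above
// (values shortened; indices refer to strings.Split(encodedHash, "$")):
//
//	argon2id $ v=19 $ m=65536,t=2,p=1 $ <base64 salt> $ <base64 hash>
//	parts[0]  parts[1]    parts[2]        parts[3]       parts[4]
//
// parts[2] is parsed as m=<memory>,t=<iterations>,p=<parallelism>; the salt and
// hash are base64.RawStdEncoding. The key length is not encoded, so
// DefaultArgon2KeyLen is assumed when verifying.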
+func decodeHash(encodedHash string) (Argon2Params, []byte, []byte, error) { + parts := strings.Split(encodedHash, "$") + if len(parts) != 5 { + return Argon2Params{}, nil, nil, fmt.Errorf("invalid hash format: '%s' - has %d parts", encodedHash, len(parts)) + } + + params := Argon2Params{} + var err error + + paramParts := strings.Split(parts[2], ",") + if len(paramParts) != 3 { + return Argon2Params{}, nil, nil, errors.New("invalid hash format: failed to parse parameters") + } + + memoryVal, err := strconv.ParseUint(paramParts[0][2:], 10, 32) + if err != nil { + return Argon2Params{}, nil, nil, errors.New("invalid hash format: failed to parse memory") + } + params.Memory = uint32(memoryVal) + + timeVal, err := strconv.ParseUint(paramParts[1][2:], 10, 32) + if err != nil { + return Argon2Params{}, nil, nil, errors.New("invalid hash format: failed to parse time") + } + params.Time = uint32(timeVal) + + parallelismVal, err := strconv.ParseUint(paramParts[2][2:], 10, 8) + if err != nil { + return Argon2Params{}, nil, nil, errors.New("invalid hash format: failed to parse parallelism") + } + params.Parallelism = uint8(parallelismVal) + + params.KeyLen = uint32(DefaultArgon2KeyLen) + + salt, err := base64.RawStdEncoding.DecodeString(parts[3]) + if err != nil { + return Argon2Params{}, nil, nil, errors.New("invalid hash format: failed to decode salt") + } + + hash, err := base64.RawStdEncoding.DecodeString(parts[4]) + if err != nil { + return Argon2Params{}, nil, nil, errors.New("invalid hash format: failed to decode hash") + } + + return params, salt, hash, nil +} diff --git a/backend/password/hashing_test.go b/backend/password/hashing_test.go new file mode 100644 index 0000000..403896a --- /dev/null +++ b/backend/password/hashing_test.go @@ -0,0 +1,41 @@ +package password_test + +import ( + "testing" + + "github.com/phishingclub/phishingclub/password" +) + +func TestArgon2HasherAndVerifier(t *testing.T) { + // Create a new hasher with default values + hasher := password.NewHasherWithDefaultValues() + + // Test password hashing + pass := "mysecretpassword" + hash, err := hasher.Hash(pass) + if err != nil { + t.Errorf("Failed to hash password: %v", err) + } + + // Create a new verifier + verifier := password.NewArgon2Verifier() + + // Test password verification + match, err := verifier.Verify(pass, hash) + if err != nil { + t.Errorf("Failed to verify password: %v", err) + } + if !match { + t.Error("Password verification failed") + } + + // Test incorrect password verification + incorrectPassword := "incorrectpassword" + match, err = verifier.Verify(incorrectPassword, hash) + if err != nil { + t.Errorf("Failed to verify password: %v", err) + } + if match { + t.Error("Incorrect password should not match") + } +} diff --git a/backend/random/generate.go b/backend/random/generate.go new file mode 100644 index 0000000..d3f3cd6 --- /dev/null +++ b/backend/random/generate.go @@ -0,0 +1,40 @@ +package random + +import ( + "crypto/rand" + "encoding/base64" + "fmt" + "math/big" + + "github.com/phishingclub/phishingclub/errs" +) + +// GenerateRandomLowerAndUpper generates random lower and upper case string +func GenerateRandomURLBase64Encoded(length int) (string, error) { + randomBytes, err := GenerateRandomBytes(length) + if err != nil { + return "", fmt.Errorf("failed to generate random string with random bytes: %w", err) + } + str := base64.URLEncoding.EncodeToString(randomBytes) + return str[:length], nil +} + +// GenerateRandomBytes generates random bytes +func GenerateRandomBytes(length int) ([]byte, 
error) { + buff := make([]byte, length) + _, err := rand.Read(buff) + if err != nil { + return []byte{}, fmt.Errorf("failed to generate random bytes: %w", err) + } + return buff, nil +} + +// RandomIntN generates a random number between 0 and n +func RandomIntN(n int) (int, error) { + max := big.NewInt(int64(n)) + randNum, err := rand.Int(rand.Reader, max) + if err != nil { + return 0, errs.Wrap(err) + } + return int(randNum.Int64()), nil +} diff --git a/backend/random/generate_test.go b/backend/random/generate_test.go new file mode 100644 index 0000000..5142ef4 --- /dev/null +++ b/backend/random/generate_test.go @@ -0,0 +1,17 @@ +package random + +import "testing" + +func TestGenerateRandomLowerUpperAndNumeric(t *testing.T) { + t.Run("should generate a random password of expected length", func(t *testing.T) { + length := 10 + password, err := GenerateRandomURLBase64Encoded(length) + if err != nil { + t.Errorf("expected no error, got %v", err) + } + if len(password) != length { + t.Errorf("expected password length to be %d, got %d", length, len(password)) + } + }) + +} diff --git a/backend/repository/allowDeny.go b/backend/repository/allowDeny.go new file mode 100644 index 0000000..e6a76f1 --- /dev/null +++ b/backend/repository/allowDeny.go @@ -0,0 +1,198 @@ +package repository + +import ( + "context" + "strings" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowDenyAllowColumns = assignTableToColumns(database.ALLOW_DENY_TABLE, []string{ + "created_at", + "updated_at", + "name", + "cidr", + "allowed", +}) + +type AllowDenyOption struct { + Fields []string + *vo.QueryArgs +} + +// AllowDeny is a repository for allow deny lists +type AllowDeny struct { + DB *gorm.DB +} + +// Insert inserts a new allow deny list +func (r *AllowDeny) Insert( + ctx context.Context, + conf *model.AllowDeny, +) (*uuid.UUID, error) { + id := uuid.New() + row := conf.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := r.DB.Model(&database.AllowDeny{}).Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetAll gets all allow deny lists +func (r *AllowDeny) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *AllowDenyOption, +) (*model.Result[model.AllowDeny], error) { + result := model.NewEmptyResult[model.AllowDeny]() + db := withCompanyIncludingNullContext(r.DB, companyID, "allow_denies") + db, err := useQuery(db, database.ALLOW_DENY_TABLE, options.QueryArgs, allowDenyAllowColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var rows []*database.AllowDeny + if options.Fields != nil { + db = db.Select(strings.Join(options.Fields, ",")) + } + res := db. 
+ Find(&rows) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.ALLOW_DENY_TABLE, + options.QueryArgs, + allowDenyAllowColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, ad := range rows { + row := ToAllowDeny(ad) + result.Rows = append(result.Rows, row) + } + + return result, nil +} + +// GetAllByCompanyID gets all allow deny lists by company id +func (r *AllowDeny) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *AllowDenyOption, +) (*model.Result[model.AllowDeny], error) { + results := model.NewEmptyResult[model.AllowDeny]() + db := whereCompany(r.DB, database.ALLOW_DENY_TABLE, companyID) + db, err := useQuery(db, database.ALLOW_DENY_TABLE, options.QueryArgs, allowDenyAllowColumns...) + if err != nil { + return results, errs.Wrap(err) + } + var rows []*database.AllowDeny + res := db. + Find(&rows) + + if res.Error != nil { + return results, res.Error + } + + hasNextPage, err := useHasNextPage(db, database.ALLOW_DENY_TABLE, options.QueryArgs, allowDenyAllowColumns...) + if err != nil { + return results, errs.Wrap(err) + } + results.HasNextPage = hasNextPage + + for _, ad := range rows { + results.Rows = append(results.Rows, ToAllowDeny(ad)) + } + + return results, nil +} + +// GetByID gets an existing allow deny list +func (r *AllowDeny) GetByID( + ctx context.Context, + id *uuid.UUID, + option *AllowDenyOption, +) (*model.AllowDeny, error) { + + var row database.AllowDeny + res := r.DB.Where("id = ?", id).First(&row) + + if res.Error != nil { + return nil, res.Error + } + + return ToAllowDeny(&row), nil +} + +// Update updates an existing allow deny list +func (r *AllowDeny) Update( + ctx context.Context, + id uuid.UUID, + conf *model.AllowDeny, +) error { + row := conf.ToDBMap() + AddUpdatedAt(row) + + res := r.DB.Model(&database.AllowDeny{}). + Where("id = ?", id). + Updates(row) + + return res.Error +} + +// Delete deletes an existing allow deny list +func (r *AllowDeny) Delete( + ctx context.Context, + id uuid.UUID, +) error { + res := r.DB.Model(&database.AllowDeny{}). + Where("id = ?", id). 
+ Delete(&database.AllowDeny{}) + + return res.Error +} + +func ToAllowDeny(row *database.AllowDeny) *model.AllowDeny { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString127Must(row.Name)) + cidrs := vo.IPNetSlice{} + for _, cidr := range strings.Split(row.Cidrs, "\n") { + if len(cidr) == 0 { + continue + } + cidr := *vo.NewIPNetMust(cidr) + cidrs = append(cidrs, cidr) + } + cidrsNullable := nullable.NewNullableWithValue(cidrs) + + return &model.AllowDeny{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + Cidrs: cidrsNullable, + Allowed: nullable.NewNullableWithValue(row.Allowed), + CompanyID: companyID, + } +} diff --git a/backend/repository/apiSender.go b/backend/repository/apiSender.go new file mode 100644 index 0000000..7fce877 --- /dev/null +++ b/backend/repository/apiSender.go @@ -0,0 +1,348 @@ +package repository + +import ( + "context" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var apiSenderAllowedColumns = assignTableToColumns(database.API_SENDER_TABLE, []string{ + "created_at", + "updated_at", + "name", +}) + +// APISenderOption is options for preloading +type APISenderOption struct { + *vo.QueryArgs + + WithRequestHeaders bool + WithResponseHeaders bool +} + +// APISender is a API sender repository +type APISender struct { + DB *gorm.DB +} + +// preload applies the preloading options +func (a *APISender) preload(o *APISenderOption, db *gorm.DB) *gorm.DB { + if o == nil { + return db + } + return db +} + +// Insert inserts a new API sender +func (a *APISender) Insert( + ctx context.Context, + apiSender *model.APISender, +) (*uuid.UUID, error) { + id := uuid.New() + row := apiSender.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := a.DB. + Model(&database.APISender{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetByID gets a API sender by ID +func (a *APISender) GetByID( + ctx context.Context, + id *uuid.UUID, + option *APISenderOption, +) (*model.APISender, error) { + db := a.preload(option, a.DB) + + dbAPISender := &database.APISender{} + res := db. + Where("id = ?", id). 
+ First(&dbAPISender) + + if res.Error != nil { + return nil, res.Error + } + return ToAPISender(dbAPISender) +} + +// GetAll gets API senders +func (a *APISender) GetAll( + ctx context.Context, + companyID *uuid.UUID, + option *APISenderOption, +) (*model.Result[model.APISender], error) { + result := model.NewEmptyResult[model.APISender]() + db := a.preload(option, a.DB) + db = withCompanyIncludingNullContext(db, companyID, database.API_SENDER_TABLE) + db, err := useQuery( + db, + database.API_SENDER_TABLE, + option.QueryArgs, + apiSenderAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + dbAPISenders := []*database.APISender{} + res := db.Find(&dbAPISenders) + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.API_SENDER_TABLE, + option.QueryArgs, + apiSenderAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbAPISender := range dbAPISenders { + apiSender, err := ToAPISender(dbAPISender) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, apiSender) + } + return result, nil +} + +// GetAllOverview gets API senders with limited data +func (a *APISender) GetAllOverview( + ctx context.Context, + companyID *uuid.UUID, + option *APISenderOption, +) (*model.Result[model.APISender], error) { + result := model.NewEmptyResult[model.APISender]() + db := a.preload(option, a.DB) + db = withCompanyIncludingNullContext(db, companyID, database.API_SENDER_TABLE) + db, err := useQuery( + db, + database.API_SENDER_TABLE, + option.QueryArgs, + apiSenderAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + dbAPISenders := []*database.APISender{} + res := db. + Select( + TableColumn(database.API_SENDER_TABLE, "id"), + TableColumn(database.API_SENDER_TABLE, "name"), + ). 
+ Find(&dbAPISenders) + + if res.Error != nil { + return result, res.Error + } + for _, dbAPISender := range dbAPISenders { + apiSender, err := ToAPISenderOverview(dbAPISender) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, apiSender) + } + return result, nil +} + +// GetAllByCompanyID gets API senders by company id +func (a *APISender) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + option *APISenderOption, +) (*model.Result[model.APISender], error) { + result := model.NewEmptyResult[model.APISender]() + db := a.preload(option, a.DB) + db = whereCompany(db, database.API_SENDER_TABLE, companyID) + db, err := useQuery( + db, + database.API_SENDER_TABLE, + option.QueryArgs, + apiSenderAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + dbAPISenders := []*database.APISender{} + res := db.Find(&dbAPISenders) + if res.Error != nil { + return result, res.Error + } + for _, dbAPISender := range dbAPISenders { + apiSender, err := ToAPISender(dbAPISender) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, apiSender) + } + return result, nil +} + +// GetByName gets a API sender by name +func (a *APISender) GetByName( + ctx context.Context, + name *vo.String64, + companyID *uuid.UUID, + option *APISenderOption, +) (*model.APISender, error) { + db := a.preload(option, a.DB) + db = withCompanyIncludingNullContext(db, companyID, "api_senders") + + dbAPISender := &database.APISender{} + res := db.Where("name = ?", name.String()).First(&dbAPISender) + if res.Error != nil { + return nil, res.Error + } + return ToAPISender(dbAPISender) +} + +// UpdateByID updates a API sender by ID +func (a *APISender) UpdateByID( + ctx context.Context, + id *uuid.UUID, + ent *model.APISender, +) error { + row := ent.ToDBMap() + AddUpdatedAt(row) + res := a.DB. + Model(&database.APISender{}). + Where("id = ?", id). 
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a API sender by ID +func (a *APISender) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + res := a.DB.Where("id = ?", id).Delete(&database.APISender{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// ToAPISender converts a API sender database to a model +func ToAPISender(row *database.APISender) (*model.APISender, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + apiKey := nullable.NewNullNullable[vo.OptionalString255]() + if row.APIKey != "" { + apiKey.Set(*vo.NewOptionalString255Must(row.APIKey)) + } else { + apiKey.SetUnspecified() + } + customField1 := nullable.NewNullableWithValue( + *vo.NewOptionalString255Must(row.CustomField1), + ) + customField2 := nullable.NewNullableWithValue( + *vo.NewOptionalString255Must(row.CustomField2), + ) + customField3 := nullable.NewNullableWithValue( + *vo.NewOptionalString255Must(row.CustomField3), + ) + customField4 := nullable.NewNullableWithValue( + *vo.NewOptionalString255Must(row.CustomField4), + ) + requestMethod := nullable.NewNullableWithValue( + *vo.NewHTTPMethodMust(row.RequestMethod), + ) + requestURL := nullable.NewNullableWithValue( + *vo.NewString255Must(row.RequestURL), + ) + requestHeaders := nullable.NewNullNullable[model.APISenderHeaders]() + if row.RequestHeaders != "" { + a, err := model.NewAPISenderHeader(row.RequestHeaders) + if err != nil { + return nil, errs.Wrap(err) + } + requestHeaders.Set(*a) + } else { + requestHeaders.SetUnspecified() + } + requestBody := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(row.RequestBody)) + expectedResponseStatusCode := nullable.NewNullNullable[int]() + if row.ExpectedResponseStatusCode != 0 { + expectedResponseStatusCode.Set(row.ExpectedResponseStatusCode) + } else { + expectedResponseStatusCode.SetNull() + } + expectedResponseBody := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust( + row.ExpectedResponseBody, + )) + expectedResponseHeaders := nullable.NewNullNullable[model.APISenderHeaders]() + if row.ExpectedResponseHeaders != "" { + a, err := model.NewAPISenderHeader(row.ExpectedResponseHeaders) + if err != nil { + return nil, errs.Wrap(err) + } + expectedResponseHeaders.Set(*a) + } else { + expectedResponseHeaders.SetUnspecified() + } + + return &model.APISender{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Name: name, + APIKey: apiKey, + CustomField1: customField1, + CustomField2: customField2, + CustomField3: customField3, + CustomField4: customField4, + RequestMethod: requestMethod, + RequestURL: requestURL, + RequestHeaders: requestHeaders, + RequestBody: requestBody, + ExpectedResponseStatusCode: expectedResponseStatusCode, + ExpectedResponseBody: expectedResponseBody, + ExpectedResponseHeaders: expectedResponseHeaders, + }, nil +} + +// ToAPISenderOverview converts a API sender database to a overview model +func ToAPISenderOverview(row *database.APISender) (*model.APISender, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + + return &model.APISender{ + ID: id, + CreatedAt: 
row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Name: name, + }, nil +} diff --git a/backend/repository/asset.go b/backend/repository/asset.go new file mode 100644 index 0000000..80d0a22 --- /dev/null +++ b/backend/repository/asset.go @@ -0,0 +1,255 @@ +package repository + +import ( + "context" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var assetAllowedColumns = assignTableToColumns(database.ASSET_TABLE, []string{ + "created_at", + "updated_at", + "name", + "description", + "path", +}) + +// Asset is a asset repository +type Asset struct { + DB *gorm.DB +} + +// Insert inserts a new asset +func (r *Asset) Insert( + ctx context.Context, + asset *model.Asset, +) (*uuid.UUID, error) { + id := uuid.New() + row := asset.ToDBMap() + row["id"] = id + AddTimestamps(row) + res := r.DB.Model(&database.Asset{}).Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +func (r *Asset) GetAllByDomainAndContext( + ctx context.Context, + domainID *uuid.UUID, + companyID *uuid.UUID, + queryArgs *vo.QueryArgs, +) (*model.Result[model.Asset], error) { + result := model.NewEmptyResult[model.Asset]() + db := r.DB + // domain specific context + // TODO this might need to be refactored such that both domain id and company is + // indivuadually checked, this is important to check if roles are implemented + if domainID != nil { + db = db. + Joins("left join domains on domains.id = assets.domain_id"). + Select(r.joinSelectString()). + Where("(assets.company_id = ? OR assets.company_id IS NULL) AND (domain_id = ? OR domain_id IS NULL)", companyID, domainID) + } else { + db.Where("assets.company_id = ?", companyID) + } + db, err := useQuery(db, database.ASSET_TABLE, queryArgs, assetAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + + var dbModels []*database.Asset + dbRes := db. + Find(&dbModels) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.ASSET_TABLE, queryArgs, assetAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbModel := range dbModels { + result.Rows = append(result.Rows, ToAsset(dbModel)) + } + return result, nil +} + +func (r *Asset) joinSelectString() string { + return "assets.id AS id, assets.created_at AS created_at, assets.updated_at AS updated_at, assets.company_id AS company_id, assets.name AS name, assets.description AS description, assets.path AS path, domains.id AS domain_id, domains.name AS domain_name" +} + +// GetAllByGlobalContext gets all global assets +func (r *Asset) GetAllByGlobalContext( + ctx context.Context, + queryArgs *vo.QueryArgs, +) (*model.Result[model.Asset], error) { + result := model.NewEmptyResult[model.Asset]() + var db *gorm.DB + db, err := useQuery(r.DB, database.ASSET_TABLE, queryArgs, assetAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbModels []*database.Asset + dbRes := db. + Where("company_id IS NULL"). 
+ Find(&dbModels) + + if dbRes.Error != nil { + return nil, dbRes.Error + } + for _, dbModel := range dbModels { + result.Rows = append(result.Rows, ToAsset(dbModel)) + } + return result, nil +} + +// GetByPath gets an asset by file path +func (r *Asset) GetByPath( + ctx context.Context, + path string, +) (*model.Asset, error) { + var dbModel database.Asset + res := r.DB.Joins("left join domains on domains.id = assets.domain_id"). + Select("assets.*, domains.name AS domain_name"). + Where("assets.path = ?", path). + First(&dbModel) + + if res.Error != nil { + return nil, res.Error + } + return ToAsset(&dbModel), nil +} + +// GetByID gets an asset by id +func (r *Asset) GetByID( + ctx context.Context, + id *uuid.UUID, +) (*model.Asset, error) { + var dbModel database.Asset + res := r.DB.Joins("left join domains on domains.id = assets.domain_id"). + Select("assets.*, domains.name AS domain_name"). + Where("assets.id = ?", id). + First(&dbModel) + + if res.Error != nil { + return nil, res.Error + } + return ToAsset(&dbModel), nil +} + +// GetAllByCompanyID gets all assets by company id +func (r *Asset) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, +) ([]*model.Asset, error) { + models := []*model.Asset{} + dbModels := []*database.Asset{} + res := r.DB.Model(&database.Asset{}). + Where("company_id = ?", companyID.String()). + Find(&dbModels) + + if res.Error != nil { + return models, res.Error + } + for _, dbModel := range dbModels { + models = append(models, ToAsset(dbModel)) + } + return models, nil +} + +// GetAllByDomainID gets all assets by company id +func (r *Asset) GetAllByDomainID( + ctx context.Context, + companyID *uuid.UUID, +) ([]*model.Asset, error) { + models := []*model.Asset{} + dbModels := []*database.Asset{} + res := r.DB.Model(&database.Asset{}). + Where("domain_id = ?", companyID.String()). + Find(&dbModels) + + if res.Error != nil { + return models, res.Error + } + for _, dbModel := range dbModels { + models = append(models, ToAsset(dbModel)) + } + return models, nil +} + +// UpdateByID updates an asset by id +func (r *Asset) UpdateByID( + ctx context.Context, + id *uuid.UUID, + asset *model.Asset, +) error { + row := asset.ToDBMap() + AddUpdatedAt(row) + res := r.DB.Model(&database.Asset{}). + Where("id = ?", id). 
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes an asset by id +func (r *Asset) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + result := r.DB.Where("id = ?", id).Delete(&database.Asset{}) + + if result.Error != nil { + return result.Error + } + return nil +} + +func ToAsset(row *database.Asset) *model.Asset { + id := nullable.NewNullableWithValue(*row.ID) + name := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.Name), + ) + description := nullable.NewNullableWithValue( + *vo.NewOptionalString255Must(row.Description), + ) + path := nullable.NewNullableWithValue( + *vo.NewRelativeFilePathMust(row.Path), + ) + asset := &model.Asset{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + Description: description, + Path: path, + } + asset.CompanyID = nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + asset.CompanyID.Set(*row.CompanyID) + } + asset.DomainID = nullable.NewNullNullable[uuid.UUID]() + if row.DomainID != nil { + asset.DomainID.Set(*row.DomainID) + } + asset.DomainName = nullable.NewNullNullable[vo.String255]() + if row.DomainName != "" { + asset.DomainName.Set(*vo.NewString255Must(row.DomainName)) + } + return asset +} diff --git a/backend/repository/attachment.go b/backend/repository/attachment.go new file mode 100644 index 0000000..c9d677e --- /dev/null +++ b/backend/repository/attachment.go @@ -0,0 +1,183 @@ +package repository + +import ( + "context" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var attachmentAllowedColumns = assignTableToColumns(database.ATTACHMENT_TABLE, []string{ + "created_at", + "updated_at", + "name", + "description", + "embedded_content", + "filename", +}) + +// Attachment is a attachment repository +type Attachment struct { + DB *gorm.DB +} + +// Insert inserts a new attachment +func (r *Attachment) Insert( + ctx context.Context, + attachment *model.Attachment, +) (*uuid.UUID, error) { + id := uuid.New() + row := attachment.ToDBMap() + row["id"] = id + AddTimestamps(row) + res := r.DB.Model(&database.Attachment{}).Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetAllByContext gets all attachments by global context and company id +func (r *Attachment) GetAllByContext( + ctx context.Context, + companyID *uuid.UUID, + query *vo.QueryArgs, +) (*model.Result[model.Attachment], error) { + result := model.NewEmptyResult[model.Attachment]() + db, err := useQuery(r.DB, database.ATTACHMENT_TABLE, query, attachmentAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbModels []database.Attachment + dbRes := db. + Where("(company_id = ? OR company_id IS NULL)", companyID). + Find(&dbModels) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.ATTACHMENT_TABLE, query, attachmentAllowedColumns...) 
+ if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbModel := range dbModels { + result.Rows = append(result.Rows, ToAttachment(&dbModel)) + } + return result, nil +} + +// GetAllByGlobalContext gets all global attachments +func (r *Attachment) GetAllByGlobalContext( + ctx context.Context, + query *vo.QueryArgs, +) (*model.Result[model.Attachment], error) { + result := model.NewEmptyResult[model.Attachment]() + var dbModels []database.Attachment + db, err := useQuery(r.DB, database.ATTACHMENT_TABLE, query, attachmentAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db. + Where("company_id IS NULL"). + Find(&dbModels) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.ATTACHMENT_TABLE, query, attachmentAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbModel := range dbModels { + result.Rows = append(result.Rows, ToAttachment(&dbModel)) + } + return result, nil +} + +// GetByID gets an attachment by id +func (r *Attachment) GetByID( + ctx context.Context, + id *uuid.UUID, +) (*model.Attachment, error) { + var dbModel database.Attachment + result := r.DB.Where("id = ?", id).First(&dbModel) + + if result.Error != nil { + return nil, result.Error + } + return ToAttachment(&dbModel), nil +} + +// UpdateByID updates an attachment by id +func (r *Attachment) UpdateByID( + ctx context.Context, + id *uuid.UUID, + attachment *model.Attachment, +) error { + row := attachment.ToDBMap() + AddUpdatedAt(row) + res := r.DB.Model(&database.Attachment{}). + Where("id = ?", id). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes an attachment by id +func (r *Attachment) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + result := r.DB.Where("id = ?", id).Delete(&database.Attachment{}) + + if result.Error != nil { + return result.Error + } + return nil +} + +// ToAttachment converts a attachment database row to a model +func ToAttachment(row *database.Attachment) *model.Attachment { + id := nullable.NewNullableWithValue(*row.ID) + name := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.Name), + ) + description := nullable.NewNullableWithValue( + *vo.NewOptionalString255Must(row.Description), + ) + filename := nullable.NewNullableWithValue( + *vo.NewFileNameMust(row.Filename), + ) + embeddedContent := nullable.NewNullableWithValue(row.EmbeddedContent) + attachment := &model.Attachment{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + Description: description, + FileName: filename, + EmbeddedContent: embeddedContent, + } + + attachment.CompanyID = nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + attachment.CompanyID.Set(*row.CompanyID) + } + + return attachment +} diff --git a/backend/repository/campaign.go b/backend/repository/campaign.go new file mode 100644 index 0000000..0a15eed --- /dev/null +++ b/backend/repository/campaign.go @@ -0,0 +1,1644 @@ +package repository + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/utils" 
+ "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowedCampaignColumns = []string{ + TableColumn(database.CAMPAIGN_TABLE, "created_at"), + TableColumn(database.CAMPAIGN_TABLE, "updated_at"), + TableColumn(database.CAMPAIGN_TABLE, "close_at"), + TableColumn(database.CAMPAIGN_TABLE, "closed_at"), + TableColumn(database.CAMPAIGN_TABLE, "anonymize_at"), + TableColumn(database.CAMPAIGN_TABLE, "anonymized_at"), + TableColumn(database.CAMPAIGN_TABLE, "send_start_at"), + TableColumn(database.CAMPAIGN_TABLE, "send_end_at"), + TableColumn(database.CAMPAIGN_TABLE, "notable_event_id"), + TableColumn(database.CAMPAIGN_TABLE, "name"), + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "name"), +} + +var allowedCampaginEventColumns = []string{ + TableColumn(database.CAMPAIGN_EVENT_TABLE, "created_at"), + TableColumn(database.CAMPAIGN_EVENT_TABLE, "updated_at"), + TableColumn(database.CAMPAIGN_EVENT_TABLE, "ip_address"), + TableColumn(database.CAMPAIGN_EVENT_TABLE, "user_agent"), + TableColumn(database.CAMPAIGN_EVENT_TABLE, "data"), +} + +var allowedCampaginEventViewColumns = utils.MergeStringSlices( + allowedCampaginEventColumns, + []string{ + TableColumn(database.RECIPIENT_TABLE, "email"), + TableColumn(database.RECIPIENT_TABLE, "first_name"), + TableColumn(database.RECIPIENT_TABLE, "last_name"), + TableColumn(database.EVENT_TABLE, "name"), + }) + +// CampaignOption is options for preloading +type CampaignOption struct { + *vo.QueryArgs + + WithCompany bool + WithCampaignTemplate bool + WithRecipientGroups bool + WithRecipientGroupCount bool + WithAllowDeny bool + WithDenyPage bool +} + +// CampaignEventOption is options for preloading +type CampaignEventOption struct { + *vo.QueryArgs + // WithCampaign bool + WithUser bool + EventTypeIDs []string +} + +// Campaign is a Campaign repository +type Campaign struct { + DB *gorm.DB +} + +// load preloads the campaign repository +func (r *Campaign) load(db *gorm.DB, options *CampaignOption) *gorm.DB { + if options.WithCompany { + db = db.Preload("Company") + } + if options.WithCampaignTemplate { + db = db.Joins(LeftJoinOn( + database.CAMPAIGN_TABLE, + "campaign_template_id", + database.CAMPAIGN_TEMPLATE_TABLE, + "id", + )) + } + if options.WithRecipientGroups { + db = db.Preload("RecipientGroups") + } + if options.WithAllowDeny { + db = db.Preload("AllowDeny") + } + if options.WithDenyPage { + db = db.Preload("DenyPage") + } + return db +} + +// preloadEventRecipient preloads the event user +func (r *Campaign) preloadEventRecipient(db *gorm.DB, options *CampaignEventOption) *gorm.DB { + if options.WithUser { + db = db.Preload("Recipient", func(db *gorm.DB) *gorm.DB { + return db + }) + } + return db +} + +// joinEvent joins the event table with the campaign event table +func (r *Campaign) joinEvent(db *gorm.DB) *gorm.DB { + return db.Joins(LeftJoinOn( + database.CAMPAIGN_EVENT_TABLE, + "event_id", + database.EVENT_TABLE, + "id", + )) +} + +// Insert inserts a new campaign +func (r *Campaign) Insert( + ctx context.Context, + campaign *model.Campaign, +) (*uuid.UUID, error) { + id := uuid.New() + row := campaign.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := r.DB.Model(&database.Campaign{}).Create(row) + + if res.Error != nil { + return nil, res.Error + } + + err := r.AddRecipientGroups(ctx, &id, campaign.RecipientGroupIDs.MustGet()) + if err != nil { + return nil, errs.Wrap(err) + } + if allowDeny, err := campaign.AllowDenyIDs.Get(); err == nil && len(allowDeny) > 0 { + err = r.AddAllowDenyLists(ctx, &id, allowDeny) + if err != 
nil { + return nil, errs.Wrap(err) + } + } + return &id, nil +} + +// Add recipient groups to campaign +func (r *Campaign) AddRecipientGroups( + ctx context.Context, + campaignID *uuid.UUID, + recipientGroupIDs []*uuid.UUID, +) error { + batch := []database.CampaignRecipientGroup{} + for _, id := range recipientGroupIDs { + batch = append(batch, database.CampaignRecipientGroup{ + CampaignID: campaignID, + RecipientGroupID: id, + }) + } + res := r.DB.Create(&batch) + + if res.Error != nil { + return res.Error + } + return nil +} + +// GetAllDenyByCampaignID gets all deny lists by campaign id +// not paginated +func (r *Campaign) GetAllDenyByCampaignID( + ctx context.Context, + campaignID *uuid.UUID, +) ([]*model.AllowDeny, error) { + allowDeny := []*model.AllowDeny{} + var dbAllowDeny []database.AllowDeny + res := r.DB. + Model(&database.AllowDeny{}). + Joins("LEFT JOIN campaign_allow_denies ON campaign_allow_denies.allow_deny_id = allow_denies.id"). + Where("campaign_id = ?", campaignID). + Find(&dbAllowDeny) + + if res.Error != nil { + return allowDeny, res.Error + } + for _, dbAllowDeny := range dbAllowDeny { + allowDeny = append(allowDeny, ToAllowDeny(&dbAllowDeny)) + } + return allowDeny, nil +} + +// AddAllowDenyLists allow/block lists to campaign +func (r *Campaign) AddAllowDenyLists( + ctx context.Context, + campaignID *uuid.UUID, + allowDenyIDs []*uuid.UUID, +) error { + + batch := []database.CampaignAllowDeny{} + for _, id := range allowDenyIDs { + batch = append(batch, database.CampaignAllowDeny{ + CampaignID: campaignID, + AllowDenyID: id, + }) + } + res := r.DB.Create(&batch) + + if res.Error != nil { + return res.Error + } + return nil +} + +// GetByWebhookID gets campaigns by webhook ID +// not paginated +func (r *Campaign) GetByWebhookID( + ctx context.Context, + webhookID *uuid.UUID, +) ([]*model.Campaign, error) { + rows := []*database.Campaign{} + models := []*model.Campaign{} + res := r.DB. + Where("webhook_id = ?", webhookID.String()). + Find(&rows) + + if res.Error != nil { + return models, res.Error + } + for _, row := range rows { + c, err := ToCampaign(row) + if err != nil { + return models, errs.Wrap(err) + } + models = append(models, c) + } + return models, nil +} + +// GetByTemplateID gets campaigns by template ID +// not paginated +func (r *Campaign) GetByAllowDenyID( + ctx context.Context, + allowDenyID *uuid.UUID, +) ([]*model.Campaign, error) { + rows := []*database.Campaign{} + models := []*model.Campaign{} + db := r.DB.InnerJoins( + LeftJoinOn( + database.CAMPAIGN_TABLE, + "id", + database.CAMPAIGN_ALLOW_DENY_TABLE, + "campaign_id", + ), + ) + res := db. + Where( + fmt.Sprintf( + "%s = ?", + TableColumn( + database.CAMPAIGN_ALLOW_DENY_TABLE, + "allow_deny_id", + ), + ), + allowDenyID.String(), + ). + Find(&rows) + + if res.Error != nil { + return models, res.Error + } + for _, row := range rows { + c, err := ToCampaign(row) + if err != nil { + return models, errs.Wrap(err) + } + models = append(models, c) + } + return models, nil +} + +// GetByTemplateIDs gets campaigns by template IDs +// not paginated +func (r *Campaign) GetByTemplateIDs( + ctx context.Context, + templateIDs []*uuid.UUID, +) ([]*model.Campaign, error) { + rows := []*database.Campaign{} + models := []*model.Campaign{} + res := r.DB. + Where( + "campaign_template_id IN ?", + UUIDsToStrings(templateIDs), + ). 
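+ // the ids are bound as plain strings, matching how single-id lookups bind
+ // id.String() elsewhere in this repository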
+ Find(&rows) + + if res.Error != nil { + return models, res.Error + } + for _, row := range rows { + c, err := ToCampaign(row) + if err != nil { + return models, errs.Wrap(err) + } + models = append(models, c) + } + return models, nil +} + +// RemoveWebhookByCampaignIDs removes the webhook from campaigns by ids +func (r *Campaign) RemoveWebhookByCampaignIDs( + ctx context.Context, + campaignIDs []*uuid.UUID, +) error { + row := map[string]interface{}{} + ids := UUIDsToStrings(campaignIDs) + AddUpdatedAt(row) + row["webhook_id"] = nil + res := r.DB. + Model(&database.Campaign{}). + Where("id IN ?", ids). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveDenyPageByCampaignIDs remove the deny page from the campaign IDs +func (r *Campaign) RemoveDenyPageByCampaignIDs( + ctx context.Context, + campaignIDs []*uuid.UUID, +) error { + row := map[string]interface{}{} + ids := UUIDsToStrings(campaignIDs) + AddUpdatedAt(row) + row["deny_page_id"] = nil + res := r.DB. + Model(&database.Campaign{}). + Where("id IN ?", ids). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveDenyPageByDenyPageIDs removes deny page id from campaigns by page idsj +func (r *Campaign) RemoveDenyPageByDenyPageIDs( + ctx context.Context, + campaignIDs []*uuid.UUID, +) error { + row := map[string]interface{}{} + ids := UUIDsToStrings(campaignIDs) + AddUpdatedAt(row) + row["deny_page_id"] = nil + res := r.DB. + Model(&database.Campaign{}). + Where("deny_page_id IN ?", ids). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveAllowDenyListsByID removes allow/block lists from campaign by allow deny list id +func (r *Campaign) RemoveAllowDenyListsByID( + ctx context.Context, + id *uuid.UUID, +) error { + res := r.DB. + Where("allow_deny_id = ?", id). + Delete(&database.CampaignAllowDeny{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveAllowDenyListsByCampaignID removes allow/block lists from campaign +func (r *Campaign) RemoveAllowDenyListsByCampaignID( + ctx context.Context, + campaignID *uuid.UUID, +) error { + res := r.DB. + Where("campaign_id = ?", campaignID). + Delete(&database.CampaignAllowDeny{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// GetRecipientGroupCount gets the count of recipient groups +func (r *Campaign) GetRecipientGroupCount( + ctx context.Context, + campaignID *uuid.UUID, +) (int, error) { + var count int64 + res := r.DB. + Model(&database.CampaignRecipientGroup{}). + Where("campaign_id = ?", campaignID). + Count(&count) + + if res.Error != nil { + return 0, res.Error + } + return int(count), nil +} + +// GetAllActive gets the active campaigns +func (r *Campaign) GetAllActive( + ctx context.Context, + companyID *uuid.UUID, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + if companyID != nil { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + + if strings.Contains(options.QueryArgs.OrderBy, "send_start_at") { + db = db.Order("send_start_at IS NULL DESC") + } + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaigns []database.Campaign + res := db. + Where( + "((send_start_at <= ? OR send_start_at IS NULL) AND closed_at IS NULL)", + utils.NowRFC3339UTC(), + ). 
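+ // active means sending has started (or the campaign is self-managed with no
+ // send_start_at) and the campaign has not been closed yet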
+ Find(&dbCampaigns) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_TABLE, + options.QueryArgs, + allowedCampaignColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + return result, nil +} + +// GetAllUpcoming gets the upcoming campaigns +func (r *Campaign) GetAllUpcoming( + ctx context.Context, + companyID *uuid.UUID, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + if companyID != nil { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaigns []database.Campaign + res := db. + Where("((send_start_at > ?) AND closed_at IS NULL)", utils.NowRFC3339UTC()). + Find(&dbCampaigns) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_TABLE, + options.QueryArgs, + allowedCampaignColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + return result, nil +} + +// GetAllFinished gets the finished campaigns +func (r *Campaign) GetAllFinished( + ctx context.Context, + companyID *uuid.UUID, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + if companyID != nil { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + if strings.Contains(options.QueryArgs.OrderBy, "send_start_at") { + db = db.Order("send_start_at IS NULL DESC") + } + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaigns []database.Campaign + res := db. + Where("closed_at IS NOT NULL"). + Find(&dbCampaigns) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_TABLE, + options.QueryArgs, + allowedCampaignColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + return result, nil +} + +// GetEventsByCampaignID gets all campaign events by campaign id +func (r *Campaign) GetEventsByCampaignID( + ctx context.Context, + campaignID *uuid.UUID, + options *CampaignEventOption, + since *time.Time, +) (*model.Result[model.CampaignEvent], error) { + result := model.NewEmptyResult[model.CampaignEvent]() + db := r.preloadEventRecipient(r.DB, options) + db = r.joinEvent(db) + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaginEventViewColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaignEvents []database.CampaignEvent + db = db. 
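+ // join recipients so event rows can be filtered and ordered by the recipient
+ // columns listed in allowedCampaginEventViewColumns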
+ Joins(LeftJoinOn( + database.CAMPAIGN_EVENT_TABLE, + "recipient_id", + database.RECIPIENT_TABLE, + "id", + )). + Where("campaign_id = ?", campaignID) + + if since != nil { + db = db.Where( + TableColumn(database.CAMPAIGN_EVENT_TABLE, "created_at")+" > ?", + utils.RFC3339UTC(*since), + ) + } + + if len(options.EventTypeIDs) > 0 { + db = db.Where( + TableColumn(database.CAMPAIGN_EVENT_TABLE, "event_id")+" IN ?", + options.EventTypeIDs, + ) + } + + res := db.Find(&dbCampaignEvents) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_TABLE, + options.QueryArgs, + allowedCampaginEventViewColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaignEvent := range dbCampaignEvents { + c, err := ToCampaignEvent(&dbCampaignEvent) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, c) + } + return result, nil +} + +// GetCampaignCountByTemplateID gets the count of campaigns by template id +func (r *Campaign) GetCampaignCountByTemplateID( + ctx context.Context, + templateID *uuid.UUID, +) (int, error) { + var count int64 + res := r.DB. + Model(&database.Campaign{}). + Where("campaign_template_id = ?", templateID). + Count(&count) + + if res.Error != nil { + return 0, res.Error + } + return int(count), nil +} + +// GetResultStats gets the read, clicked and submitted data grouped per recipient +// or by anon id if anonymized data +func (r *Campaign) GetResultStats( + ctx context.Context, + campaignID *uuid.UUID, +) (*model.CampaignResultView, error) { + stats := &model.CampaignResultView{} + + // get recipients count for campaign + res := r.DB.Raw(` + SELECT COUNT(*) FROM ( + SELECT DISTINCT recipient_id + FROM campaign_recipients + WHERE campaign_id = ? + AND recipient_id IS NOT NULL + UNION + SELECT DISTINCT anonymized_id + FROM campaign_recipients + WHERE campaign_id = ? + AND anonymized_id IS NOT NULL + ) as unique_ids + `, campaignID, campaignID).Scan(&stats.Recipients) + + if res.Error != nil { + return nil, res.Error + } + + // get sent email count + res = r.DB.Raw(` + SELECT COUNT(*) FROM ( + SELECT DISTINCT recipient_id + FROM campaign_events + WHERE campaign_id = ? + AND recipient_id IS NOT NULL + AND event_id = ? + UNION + SELECT DISTINCT anonymized_id + FROM campaign_events + WHERE campaign_id = ? AND anonymized_id IS NOT NULL + AND event_id = ? + ) as unique_ids +`, + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT], + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT], + ).Scan(&stats.EmailsSent) + + if res.Error != nil { + return nil, res.Error + } + + // get unique tracking pixels loaded + res = r.DB.Raw(` + SELECT COUNT(*) FROM ( + SELECT DISTINCT recipient_id + FROM campaign_events + WHERE campaign_id = ? + AND event_id = ? + AND recipient_id IS NOT NULL + UNION + SELECT DISTINCT anonymized_id + FROM campaign_events + WHERE campaign_id = ? + AND event_id = ? AND anonymized_id IS NOT NULL + ) as unique_ids +`, + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ], + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ], + ).Scan(&stats.TrackingPixelLoaded) + + if res.Error != nil { + return nil, res.Error + } + + // Get any phishing page loaded distinct by recipent and campaign + res = r.DB.Raw(` + SELECT COUNT(*) FROM ( + SELECT DISTINCT recipient_id + FROM campaign_events + WHERE campaign_id = ? 
+ AND event_id IN (?, ?, ?) + AND recipient_id IS NOT NULL + UNION + SELECT DISTINCT anonymized_id + FROM campaign_events + WHERE campaign_id = ? + AND event_id IN (?, ?, ?) + AND anonymized_id IS NOT NULL + ) as unique_ids +`, + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED], + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED], + ).Scan(&stats.WebsiteLoaded) + + if res.Error != nil { + return nil, res.Error + } + + // Get unique submits + res = r.DB.Raw(` + SELECT COUNT(*) FROM ( + SELECT DISTINCT campaign_id + FROM campaign_events + WHERE campaign_id = ? + AND event_id = ? + AND recipient_id IS NOT NULL + UNION + SELECT DISTINCT campaign_id + FROM campaign_events + WHERE campaign_id = ? + AND event_id = ? + AND anonymized_id IS NOT NULL + ) as unique_ids +`, + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA], + campaignID, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA], + ).Scan(&stats.SubmittedData) + + if res.Error != nil { + return nil, res.Error + } + + return stats, nil +} + +// GetAll gets all campaigns with pagination +func (r *Campaign) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + if companyID == nil { + db = whereCompanyIsNull(db, database.CAMPAIGN_TABLE) + } else { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaigns []database.Campaign + res := db.Find(&dbCampaigns) + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + return result, nil +} + +// GetAllCampaignWithinDates gets all campaigns that are active or scheduled within two dates, including the dates themself. +// if no company id is set, it retrieves all contexts +func (r *Campaign) GetAllCampaignWithinDates( + ctx context.Context, + companyID *uuid.UUID, + startDate time.Time, + endDate time.Time, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + + // Handle company ID filter + /* + if companyID == nil { + db = whereCompanyIsNull(db, database.CAMPAIGN_TABLE) + } else { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + */ + if companyID != nil { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns...) + if err != nil { + return result, errs.Wrap(err) + } + + var dbCampaigns []database.Campaign + + // Query campaigns that: + // 1. 
Are self-managed (no send_start_at) + // 2. Start within the date range + res := db.Where( + "(send_start_at IS NULL) OR "+ // self managed + "(send_start_at BETWEEN ? AND ?) ", // is within time + utils.RFC3339UTC(startDate), + utils.RFC3339UTC(endDate), + ).Find(&dbCampaigns) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_TABLE, + options.QueryArgs, + allowedCampaignColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + + return result, nil +} + +// GetAllByCompanyID gets all campaigns with pagination by company id +func (r *Campaign) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaigns []database.Campaign + res := db.Find(&dbCampaigns) + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + return result, nil +} + +// GetByID gets a campaign by id +func (r *Campaign) GetByID( + ctx context.Context, + id *uuid.UUID, + options *CampaignOption, +) (*model.Campaign, error) { + db := r.load(r.DB, options) + var dbCampaign database.Campaign + res := db. + Where("campaigns.id = ?", id.String()). + First(&dbCampaign) + + if res.Error != nil { + return nil, res.Error + } + return ToCampaign(&dbCampaign) +} + +// GetNameByID gets a campaign name by id +func (r *Campaign) GetNameByID( + ctx context.Context, + id *uuid.UUID, +) (string, error) { + var dbCampaign database.Campaign + res := r.DB. + Model(&database.Campaign{}). + Select("name"). + Where("id = ?", id). + First(&dbCampaign) + + if res.Error != nil { + return "", res.Error + } + return dbCampaign.Name, nil +} + +// GetByNameAndCompanyID gets a campaign by name and company id +func (r *Campaign) GetByNameAndCompanyID( + ctx context.Context, + name string, + companyID *uuid.UUID, + options *CampaignOption, +) (*model.Campaign, error) { + db := r.load(r.DB, options) + db = withCompanyIncludingNullContext(db, companyID, database.CAMPAIGN_TABLE) + var dbCampaign database.Campaign + res := db. + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TABLE, "name"), + ), + name, + ). + First(&dbCampaign) + + if res.Error != nil { + return nil, res.Error + } + return ToCampaign(&dbCampaign) +} + +// GetWebhookIDByCampaignID gets a webhook id by campaign id +func (r *Campaign) GetWebhookIDByCampaignID( + ctx context.Context, + campaignID *uuid.UUID, +) (*uuid.UUID, error) { + var campaign database.Campaign + res := r.DB. + Model(&database.Campaign{}). + Select("webhook_id"). + Where("id = ?", campaignID.String()). 
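+ // only the webhook_id column is selected; a missing campaign surfaces as
+ // gorm.ErrRecordNotFound from First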
+ First(&campaign) + + if res.Error != nil { + return nil, res.Error + } + return campaign.WebhookID, nil +} + +// GetAllReadyToClose gets all campaigns that are ready to close +func (r *Campaign) GetAllReadyToClose( + ctx context.Context, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaigns []database.Campaign + res := db. + Where("close_at <= ? AND closed_at IS NULL", utils.NowRFC3339UTC()). + Find(&dbCampaigns) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage(db, database.CAMPAIGN_TABLE, options.QueryArgs) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + return result, nil +} + +// GetReadyToAnonymize gets all campaigns that are ready to be anonymized +func (r *Campaign) GetReadyToAnonymize( + ctx context.Context, + options *CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + db := r.load(r.DB, options) + db, err := useQuery(db, database.CAMPAIGN_TABLE, options.QueryArgs) + if err != nil { + return result, errs.Wrap(err) + } + var dbCampaigns []database.Campaign + res := db. + Where("anonymize_at <= ? AND anonymized_at IS NULL", utils.NowRFC3339UTC()). + Find(&dbCampaigns) + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage(db, database.CAMPAIGN_TABLE, options.QueryArgs) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCampaign := range dbCampaigns { + campaign, err := ToCampaign(&dbCampaign) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, campaign) + } + return result, nil +} + +// SaveEvent saves a campaign event +func (r *Campaign) SaveEvent( + ctx context.Context, + campaignEvent *model.CampaignEvent, +) error { + row := map[string]any{ + "id": campaignEvent.ID.String(), + "event_id": campaignEvent.EventID.String(), + "campaign_id": campaignEvent.CampaignID.String(), + "ip_address": campaignEvent.IP.String(), + "user_agent": campaignEvent.UserAgent.String(), + "data": campaignEvent.Data.String(), + } + if campaignEvent.RecipientID != nil { + row["recipient_id"] = campaignEvent.RecipientID.String() + } + AddTimestamps(row) + res := r.DB.Model(&database.CampaignEvent{}).Create(row) + if res.Error != nil { + return res.Error + } + return nil +} + +// UpdateByID updates a campaign by id +// does not update the campaign recipient groups and campaign recipients +func (r *Campaign) UpdateByID( + ctx context.Context, + id *uuid.UUID, + campaign *model.Campaign, +) error { + row := campaign.ToDBMap() + AddUpdatedAt(row) + res := r.DB. + Model(&database.Campaign{}). + Where("id = ?", id). 
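+ // Updates with a column map writes only the supplied keys; the update error
+ // is checked after the allow/deny lists below have been synchronised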
+ Updates(row) + + if allowDeny, err := campaign.AllowDenyIDs.Get(); err == nil { + denyLen := len(allowDeny) + err = r.RemoveAllowDenyListsByCampaignID(ctx, id) + if err != nil { + return err + } + if denyLen > 0 { + err = r.AddAllowDenyLists(ctx, id, allowDeny) + if err != nil { + return err + } + } + } + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveCampaignRecipientGroups removes all recipient groups from a campaign +func (r *Campaign) RemoveCampaignRecipientGroups( + ctx context.Context, + campaignID *uuid.UUID, +) error { + res := r.DB. + Where("campaign_id = ?", campaignID). + Delete(&database.CampaignRecipientGroup{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveCampaignRecipientGroupByGroupID removes a group from a campaign +func (r *Campaign) RemoveCampaignRecipientGroupByGroupID( + ctx context.Context, + recipientGroupID *uuid.UUID, +) error { + res := r.DB. + Where("recipient_group_id = ?", recipientGroupID). + Delete(&database.CampaignRecipientGroup{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveCampaignTemplateIDFromCampaigns removes campaign template id from all +// campaign that use it. +func (r *Campaign) RemoveCampaignTemplateIDFromCampaigns( + ctx context.Context, + campaignTemplateID *uuid.UUID, +) error { + row := map[string]interface{}{} + AddUpdatedAt(row) + row["campaign_template_id"] = nil + res := r.DB. + Model(&database.Campaign{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TABLE, "campaign_template_id"), + ), + campaignTemplateID.String(), + ). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a campaign by id including its stats +func (r *Campaign) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + res := r.DB. + Where("id = ?", id). + Delete(&database.Campaign{}) + + if res.Error != nil { + return res.Error + } + return r.DeleteCampaignStats(ctx, id) +} + +// DeleteEventsByCampaignID deletes all events by campaign id +func (r *Campaign) DeleteEventsByCampaignID( + ctx context.Context, + campaignID *uuid.UUID, +) error { + res := r.DB. + Where("campaign_id = ?", campaignID). + Delete(&database.CampaignEvent{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// AddAnonymizedAt adds an anonymized at time to a campaign +func (r *Campaign) AddAnonymizedAt( + ctx context.Context, + id *uuid.UUID, +) error { + row := map[string]interface{}{ + "anonymized_at": utils.NowRFC3339UTC(), + } + AddUpdatedAt(row) + res := r.DB. + Model(&database.Campaign{}). + Where("id = ?", id). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// AnonymizeCampaignEvent anonymizes a campaign event +func (r *Campaign) AnonymizeCampaignEvent( + ctx context.Context, + campaignID *uuid.UUID, + recipientID *uuid.UUID, + anonymizedID *uuid.UUID, +) error { + row := map[string]any{ + "recipient_id": nil, + "anonymized_id": anonymizedID.String(), + "user_agent": "anonymized", + "ip_address": nil, + "data": "anonymized", + } + AddUpdatedAt(row) + res := r.DB. + Model(&database.CampaignEvent{}). + Where("campaign_id = ? AND recipient_id = ?", campaignID, recipientID). 
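+ // scrub the identifying columns in place: the recipient link is swapped for
+ // the anonymized id and IP, user agent and the data payload are overwritten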
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// AnonymizeCampaignEventsByRecipientID anonymizes campaign events by recipient ID +func (r *Campaign) AnonymizeCampaignEventsByRecipientID( + ctx context.Context, + recipientID *uuid.UUID, + anonymizedID *uuid.UUID, +) error { + row := map[string]interface{}{ + "recipient_id": nil, + "anonymized_id": anonymizedID, + "user_agent": "anonymized", + "ip_address": nil, + "data": "anonymized", + } + AddUpdatedAt(row) + res := r.DB. + Model(&database.CampaignEvent{}). + Where("recipient_id = ?", recipientID). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// GetActiveCount get the number running campaigns +// if no company ID is selected it gets the global count including all companies +func (r *Campaign) GetActiveCount(ctx context.Context, companyID *uuid.UUID) (int64, error) { + var c int64 + db := r.DB + if companyID != nil { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + res := db. + Model(&database.Campaign{}). + Where( + "((send_start_at <= ? OR send_start_at IS NULL) AND closed_at IS NULL AND is_test IS false)", + utils.NowRFC3339UTC(), + ). + Count(&c) + + return c, res.Error +} + +// GetUpcomingCount get the upcoming campaign count +// if no company ID is selected it gets the global count including all companies +func (r *Campaign) GetUpcomingCount(ctx context.Context, companyID *uuid.UUID) (int64, error) { + var c int64 + db := r.DB + if companyID != nil { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + res := db. + Model(&database.Campaign{}). + Where( + "((send_start_at > ?) AND closed_at IS NULL AND is_test IS false)", utils.NowRFC3339UTC(), + ). + Count(&c) + + return c, res.Error +} + +// GetFinishedCount get the finished campaign count +// if no company ID is selected it gets the global count including all companies +func (r *Campaign) GetFinishedCount(ctx context.Context, companyID *uuid.UUID) (int64, error) { + var c int64 + db := r.DB + if companyID != nil { + db = whereCompany(db, database.CAMPAIGN_TABLE, companyID) + } + res := db. + Model(&database.Campaign{}). + Where("closed_at IS NOT NULL AND is_test IS false"). 
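+ // as with the active and upcoming counts, test campaigns are excluded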
+ Count(&c) + + return c, res.Error +} + +func ToCampaign(row *database.Campaign) (*model.Campaign, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + var company *model.Company + if row.Company != nil { + company = ToCompany(row.Company) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + var closeAt nullable.Nullable[time.Time] + closeAt.SetNull() + if row.CloseAt != nil { + closeAt = nullable.NewNullableWithValue(*row.CloseAt) + } + var closedAt nullable.Nullable[time.Time] + closedAt.SetNull() + if row.ClosedAt != nil { + closedAt = nullable.NewNullableWithValue(*row.ClosedAt) + } + var sortField nullable.Nullable[vo.CampaignSortField] + if row.SortField != "" { + sf, err := vo.NewCampaignSortField(row.SortField) + if err != nil { + return nil, errs.Wrap(err) + } + sortField = nullable.NewNullableWithValue(*sf) + } + var sortOrder nullable.Nullable[vo.CampaignSendingOrder] + if row.SortOrder != "" { + so, err := vo.NewCampaignSendingOrder(row.SortOrder) + if err != nil { + return nil, errs.Wrap(err) + } + sortOrder = nullable.NewNullableWithValue(*so) + } + var sendStartAt nullable.Nullable[time.Time] + if row.SendStartAt != nil { + sendStartAt = nullable.NewNullableWithValue(*row.SendStartAt) + } else { + sendStartAt.SetNull() + } + var sendEndAt nullable.Nullable[time.Time] + if row.SendEndAt != nil { + sendEndAt = nullable.NewNullableWithValue(*row.SendEndAt) + } else { + sendEndAt.SetNull() + } + saveSubmittedData := nullable.NewNullableWithValue(row.SaveSubmittedData) + isAnonymous := nullable.NewNullableWithValue(row.IsAnonymous) + isTest := nullable.NewNullableWithValue(row.IsTest) + var templateID nullable.Nullable[uuid.UUID] + if row.CampaignTemplateID != nil { + templateID = nullable.NewNullableWithValue(*row.CampaignTemplateID) + } + var template *model.CampaignTemplate + if row.CampaignTemplate != nil { + var err error + template, err = ToCampaignTemplate(row.CampaignTemplate) + if err != nil { + return nil, errs.Wrap(err) + } + } + recipientGroups := []*model.RecipientGroup{} + recipientGroupIDs := []*uuid.UUID{} + if row.RecipientGroups != nil { + for _, rg := range row.RecipientGroups { + r, err := ToRecipientGroup(rg) + if err != nil { + return nil, errs.Wrap(err) + } + recipientGroups = append(recipientGroups, r) + recipientGroupIDs = append(recipientGroupIDs, rg.ID) + } + } + allowDeny := []*model.AllowDeny{} + if row.AllowDeny != nil { + for _, ad := range row.AllowDeny { + allowDeny = append(allowDeny, ToAllowDeny(ad)) + } + } + var denyPage *model.Page + if row.DenyPage != nil { + dp, err := ToPage(row.DenyPage) + if err != nil { + return nil, errs.Wrap(err) + } + denyPage = dp + } + denyPageID := nullable.NewNullNullable[uuid.UUID]() + if row.DenyPageID != nil { + denyPageID.Set(*row.DenyPageID) + } else { + denyPageID.SetNull() + } + + constraintWeekDays := nullable.NewNullNullable[vo.CampaignWeekDays]() + if row.ConstraintWeekDays != nil { + weekDays, err := vo.NewCampaignWeekDays(*row.ConstraintWeekDays) + if err != nil { + return nil, errs.Wrap(err) + } + constraintWeekDays.Set(*weekDays) + } + constraintStartTime := nullable.NewNullNullable[vo.CampaignTimeConstraint]() + if row.ConstraintStartTime != nil { + t, err := vo.NewCampaignTimeConstraint(*row.ConstraintStartTime) + if err != nil { + return nil, errs.Wrap(err) + } + constraintStartTime.Set(*t) + } + constraintEndTime := 
nullable.NewNullNullable[vo.CampaignTimeConstraint]() + if row.ConstraintEndTime != nil { + t, err := vo.NewCampaignTimeConstraint(*row.ConstraintEndTime) + if err != nil { + return nil, errs.Wrap(err) + } + constraintEndTime.Set(*t) + } + webhookID := nullable.NewNullNullable[uuid.UUID]() + if row.WebhookID != nil { + webhookID.Set(*row.WebhookID) + } + anonymizeAt := nullable.NewNullNullable[time.Time]() + if row.AnonymizeAt != nil { + anonymizeAt.Set(*row.AnonymizeAt) + } + anonymizedAt := nullable.NewNullNullable[time.Time]() + if row.AnonymizedAt != nil { + anonymizedAt.Set(*row.AnonymizedAt) + } + + var notableEventName string + var notableEventID nullable.Nullable[uuid.UUID] + notableEventID.SetNull() + if row.NotableEventID != nil { + notableEventID = nullable.NewNullableWithValue(*row.NotableEventID) + notableEventName = cache.EventNameByID[row.NotableEventID.String()] + } + + return &model.Campaign{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Company: company, + Name: name, + CloseAt: closeAt, + ClosedAt: closedAt, + AnonymizeAt: anonymizeAt, + AnonymizedAt: anonymizedAt, + SortField: sortField, + SortOrder: sortOrder, + SendStartAt: sendStartAt, + SendEndAt: sendEndAt, + ConstraintWeekDays: constraintWeekDays, + ConstraintStartTime: constraintStartTime, + ConstraintEndTime: constraintEndTime, + SaveSubmittedData: saveSubmittedData, + IsAnonymous: isAnonymous, + IsTest: isTest, + TemplateID: templateID, + Template: template, + RecipientGroups: recipientGroups, + RecipientGroupIDs: nullable.NewNullableWithValue(recipientGroupIDs), + AllowDeny: allowDeny, + DenyPage: denyPage, + DenyPageID: denyPageID, + WebhookID: webhookID, + NotableEventID: notableEventID, + NotableEventName: notableEventName, + }, nil +} + +func ToCampaignEvent(row *database.CampaignEvent) (*model.CampaignEvent, error) { + var recipient *model.Recipient + if row.Recipient != nil { + r, err := ToRecipient(row.Recipient) + if err != nil { + return nil, errs.Wrap(err) + } + recipient = r + } + ip := vo.NewOptionalString64Must(row.IPAddress) + userAgent := vo.NewOptionalString255Must(row.UserAgent) + data := vo.NewOptionalString1MBMust(row.Data) + + return &model.CampaignEvent{ + ID: row.ID, + CreatedAt: row.CreatedAt, + CampaignID: row.CampaignID, + IP: ip, + UserAgent: userAgent, + Data: data, + AnonymizedID: row.AnonymizedID, + RecipientID: row.RecipientID, + EventID: row.EventID, + Recipient: recipient, + }, nil +} + +func ToRecipientCampaignEvent(row *database.RecipientCampaignEventView) (*model.RecipientCampaignEvent, error) { + campaignEvent, err := ToCampaignEvent(&row.CampaignEvent) + if err != nil { + return nil, errs.Wrap(err) + } + return &model.RecipientCampaignEvent{ + CampaignEvent: *campaignEvent, + Name: row.Name, + CampaignName: row.CampaignName, + }, nil +} + +func appendWhereCampaignIsActive(db *gorm.DB) *gorm.DB { + return db.Where( + fmt.Sprintf( + "((%s <= ? 
OR %s IS NULL) AND %s IS NULL)", + TableColumn(database.CAMPAIGN_TABLE, "send_start_at"), + TableColumn(database.CAMPAIGN_TABLE, "send_start_at"), + TableColumn(database.CAMPAIGN_TABLE, "closed_at"), + ), + utils.NowRFC3339UTC(), + ) +} + +// InsertCampaignStats inserts campaign statistics when a campaign is closed +func (r *Campaign) InsertCampaignStats(ctx context.Context, stats *database.CampaignStats) error { + return r.DB.WithContext(ctx).Create(stats).Error +} + +// GetCampaignStats retrieves campaign statistics by campaign ID +func (r *Campaign) GetCampaignStats(ctx context.Context, campaignID *uuid.UUID) (*database.CampaignStats, error) { + var stats database.CampaignStats + res := r.DB.WithContext(ctx).Where("campaign_id = ?", campaignID).First(&stats) + if res.Error != nil { + return nil, res.Error + } + return &stats, nil +} + +// GetAllCampaignStats retrieves campaign statistics with pagination and filtering +func (r *Campaign) GetAllCampaignStats(ctx context.Context, companyID *uuid.UUID, options *vo.QueryArgs) ([]database.CampaignStats, error) { + var stats []database.CampaignStats + + db := r.DB.WithContext(ctx) + + if companyID != nil { + db = db.Where("company_id = ?", companyID) + } + + if options != nil { + if options.Search != "" { + db = db.Where("campaign_name ILIKE ?", "%"+options.Search+"%") + } + + if options.OrderBy != "" { + sortColumn := options.OrderBy + if options.Desc { + sortColumn += " DESC" + } + db = db.Order(sortColumn) + } else { + db = db.Order("campaign_closed_at DESC") + } + + if options.Limit > 0 { + db = db.Offset(options.Offset).Limit(options.Limit) + } + } + + res := db.Find(&stats) + return stats, res.Error +} + +// GetCampaignStatsCount returns the total count of campaign statistics +func (r *Campaign) GetCampaignStatsCount(ctx context.Context, companyID *uuid.UUID, search string) (int64, error) { + var count int64 + + db := r.DB.WithContext(ctx).Model(&database.CampaignStats{}) + + if companyID != nil { + db = db.Where("company_id = ?", companyID) + } + + if search != "" { + db = db.Where("campaign_name ILIKE ?", "%"+search+"%") + } + + res := db.Count(&count) + return count, res.Error +} + +// DeleteCampaignStats deletes campaign statistics by campaign ID +func (r *Campaign) DeleteCampaignStats(ctx context.Context, campaignID *uuid.UUID) error { + res := r.DB.WithContext(ctx).Where("campaign_id = ?", campaignID).Delete(&database.CampaignStats{}) + return res.Error +} diff --git a/backend/repository/campaignRecipient.go b/backend/repository/campaignRecipient.go new file mode 100644 index 0000000..3dd262a --- /dev/null +++ b/backend/repository/campaignRecipient.go @@ -0,0 +1,557 @@ +package repository + +import ( + "context" + "fmt" + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowedCampaignRecipientColumns = []string{ + "campaign_recipients.created_at", + "campaign_recipients.updated_at", + "campaign_recipients.send_at", + "campaign_recipients.sent_at", + "campaign_recipients.cancelled_at", + "campaign_recipients.notable_event_id", + "recipients.first_name", + "recipients.last_name", + "recipients.email", +} + +// CampaignRecipientOption is options for preloading 
+type CampaignRecipientOption struct { + *vo.QueryArgs + WithCampaign bool + WithRecipient bool +} + +// CampaignRecipient is a CampaignRecipient repository +// this holds campaign-recipients and their campaign results +type CampaignRecipient struct { + DB *gorm.DB +} + +// Preload preloads the campaign recipients +func (r *CampaignRecipient) preload(db *gorm.DB, options *CampaignRecipientOption) *gorm.DB { + if options.WithRecipient { + db = db.Preload("Recipient") + } + if options.WithCampaign { + db = db.Preload("Campaign") + } + return db +} + +// Cancel cancels recipients +func (r *CampaignRecipient) Cancel( + ctx context.Context, + campaignRecipientUUIDs []*uuid.UUID, +) error { + if len(campaignRecipientUUIDs) == 0 { + return nil + } + row := map[string]any{ + "cancelled_at": utils.NowRFC3339UTC(), + } + AddUpdatedAt(row) + result := r.DB. + Model(&database.CampaignRecipient{}). + Where( + fmt.Sprintf( + "%s IN ?", + TableColumnID(database.CAMPAIGN_RECIPIENT_TABLE_NAME), + ), + UUIDsToStrings(campaignRecipientUUIDs), + ). + Updates(row) + + if result.Error != nil { + return result.Error + } + // set notable event + if len(campaignRecipientUUIDs) == 0 { + return nil + } + row = map[string]any{ + "notable_event_id": cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_CANCELLED], + } + AddUpdatedAt(row) + result = r.DB. + Model(&database.CampaignRecipient{}). + Where( + fmt.Sprintf( + "%s IN ? AND sent_at IS NULL AND cancelled_at IS NOT NULL", + TableColumnID(database.CAMPAIGN_RECIPIENT_TABLE_NAME), + ), + UUIDsToStrings(campaignRecipientUUIDs), + ). + Where( + "notable_event_id IS NULL OR notable_event_id IS ?", + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SCHEDULED], + ). + Updates(row) + + if result.Error != nil { + return result.Error + } + + return nil +} + +// Insert inserts a new campaign recipient +func (r *CampaignRecipient) Insert( + ctx context.Context, + campaignRecipient *model.CampaignRecipient, + //campaignRecipient *database.CampaignRecipient, +) (*uuid.UUID, error) { + id := uuid.New() + row := campaignRecipient.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := r.DB. + Model(&database.CampaignRecipient{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// DeleteRecipientsNotIn deletes recipients in campaign that are +// not in the slice recipient ids supplied +func (r *CampaignRecipient) DeleteRecipientsNotIn( + ctx context.Context, + campaignID *uuid.UUID, + recipientIDs []*uuid.UUID, +) error { + res := r.DB. + Where( + fmt.Sprintf("%s = ?", TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "campaign_id")), + campaignID, + ). + Where( + fmt.Sprintf("%s NOT IN ?", TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "recipient_id")), + UUIDsToStrings(recipientIDs), + ). + Delete(&database.CampaignRecipient{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// GetRecipiensByCampaignID gets all campaignrecipients by campaign id +func (r *CampaignRecipient) GetByCampaignID( + ctx context.Context, + campaignID *uuid.UUID, + options *CampaignRecipientOption, +) ([]*model.CampaignRecipient, error) { + recps := []*model.CampaignRecipient{} + db, err := useQuery(r.DB, database.CAMPAIGN_TABLE, options.QueryArgs, allowedCampaignRecipientColumns...) + if err != nil { + return recps, errs.Wrap(err) + } + db = r.preload(db, options) + var dbCampaignRecipients []database.CampaignRecipient + res := db. + Joins("LEFT JOIN recipients ON recipients.id = campaign_recipients.recipient_id"). 
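+ // the recipients join exposes the name and email columns from
+ // allowedCampaignRecipientColumns for sorting and searching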
+ Where( + fmt.Sprintf("%s = ?", TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "campaign_id")), + campaignID, + ). + Find(&dbCampaignRecipients) + + if res.Error != nil { + return recps, res.Error + } + for _, dbCampaignRecipient := range dbCampaignRecipients { + r, err := ToCampaignRecipient(&dbCampaignRecipient) + if err != nil { + return recps, nil + } + recps = append(recps, r) + } + return recps, nil +} + +// GetByID gets a campaign recipient by id +func (r *CampaignRecipient) GetByID( + ctx context.Context, + id *uuid.UUID, + options *CampaignRecipientOption, +) (*model.CampaignRecipient, error) { + db := r.preload(r.DB, options) + db, err := useQuery(db, database.CAMPAIGN_RECIPIENT_TABLE_NAME, options.QueryArgs) + if err != nil { + return nil, errs.Wrap(err) + } + var dbCampaignRecipient database.CampaignRecipient + res := db. + Where( + fmt.Sprintf("%s = ?", TableColumnID(database.CAMPAIGN_RECIPIENT_TABLE_NAME)), + id.String(), + ). + First(&dbCampaignRecipient) + + if res.Error != nil { + return nil, res.Error + } + return ToCampaignRecipient(&dbCampaignRecipient) +} + +// GetByCampaignAndRecipientID gets a campaign recipient by campaign and recipient id +func (r *CampaignRecipient) GetByCampaignAndRecipientID( + ctx context.Context, + campaignID *uuid.UUID, + recipientID *uuid.UUID, + options *CampaignRecipientOption, +) (*model.CampaignRecipient, error) { + db := r.preload(r.DB, options) + db, err := useQuery(db, database.CAMPAIGN_RECIPIENT_TABLE_NAME, options.QueryArgs) + if err != nil { + return nil, errs.Wrap(err) + } + var dbCampaignRecipient database.CampaignRecipient + res := db. + Where( + fmt.Sprintf( + "%s = ?", TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "campaign_id"), + ), + campaignID.String(), + ). + Where( + fmt.Sprintf( + "%s = ?", TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "recipient_id"), + ), + recipientID.String(), + ). + First(&dbCampaignRecipient) + + if res.Error != nil { + return nil, res.Error + } + return ToCampaignRecipient(&dbCampaignRecipient) +} + +// GetByCampaignRecipientID gets a campaign and recipient by campaign recipient id +func (r *CampaignRecipient) GetByCampaignRecipientID( + ctx context.Context, + id *uuid.UUID, +) (*model.CampaignRecipient, error) { + var dbCampaignRecipient database.CampaignRecipient + res := r.DB. + Where( + fmt.Sprintf("%s = ?", TableColumnID(database.CAMPAIGN_RECIPIENT_TABLE_NAME)), + id.String(), + ). + First(&dbCampaignRecipient) + + if res.Error != nil { + return nil, res.Error + } + return ToCampaignRecipient(&dbCampaignRecipient) +} + +// GetUnsendRecipients gets all campaign recipients that are not sent +// and have been attempted or been cancelled +// if limit is larger than 0 it will limit the number of results +func (r *CampaignRecipient) GetUnsendRecipients( + ctx context.Context, + limit int, + options *CampaignRecipientOption, +) ([]*model.CampaignRecipient, error) { + recps := []*model.CampaignRecipient{} + db := r.preload(r.DB, options) + db, err := useQuery(db, database.CAMPAIGN_RECIPIENT_TABLE_NAME, options.QueryArgs) + if err != nil { + return recps, errs.Wrap(err) + } + var dbCampaignRecipients []database.CampaignRecipient + + q := db.Where( + fmt.Sprintf( + "%s IS NULL AND %s IS NULL", + TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "cancelled_at"), + TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "last_attempt_at"), + ), + ) + if limit > 0 { + q = q.Limit(limit) + } + res := q. 
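+ // unsent here means no cancellation and no previous delivery attempt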
+ Find(&dbCampaignRecipients) + + if res.Error != nil { + return recps, res.Error + } + for _, dbCampaignRecipient := range dbCampaignRecipients { + r, err := ToCampaignRecipient(&dbCampaignRecipient) + if err != nil { + return nil, errs.Wrap(err) + } + recps = append(recps, r) + } + return recps, nil +} + +// GetUnsendRecipientsForSending gets all campaign recipients that are not sent +// and have not reached the max send attempts or been cancelled +// the limit is only used if it is larger than 0 +func (r *CampaignRecipient) GetUnsendRecipientsForSending( + ctx context.Context, + limit int, + options *CampaignRecipientOption, +) ([]*model.CampaignRecipient, error) { + recps := []*model.CampaignRecipient{} + db := r.preload(r.DB, options) + db, err := useQuery(db, database.CAMPAIGN_RECIPIENT_TABLE_NAME, options.QueryArgs) + if err != nil { + return recps, errs.Wrap(err) + } + var dbCampaignRecipients []database.CampaignRecipient + q := db. + Where( + fmt.Sprintf( + "%s IS NULL"+ + " AND %s <= ?"+ + " AND %s IS NULL"+ + " AND %s IS NULL"+ + " AND %s = false", + TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "sent_at"), + TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "send_at"), + TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "cancelled_at"), + TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "last_attempt_at"), + TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "self_managed"), + ), utils.NowRFC3339UTC(), + ) + + if limit > 0 { + q = q.Limit(limit) + } + res := q. + Find(&dbCampaignRecipients) + + if res.Error != nil { + return recps, res.Error + } + for _, dbCampaignRecipient := range dbCampaignRecipients { + r, err := ToCampaignRecipient(&dbCampaignRecipient) + if err != nil { + return recps, errs.Wrap(err) + } + recps = append(recps, r) + } + return recps, nil +} + +// DeleteByCampaigID removes all campaign recipients from a campaign +func (r *CampaignRecipient) DeleteByCampaigID( + ctx context.Context, + campaignID *uuid.UUID, +) error { + res := r.DB. + Where( + fmt.Sprintf( + "%s = ?", TableColumn(database.CAMPAIGN_RECIPIENT_TABLE_NAME, "campaign_id"), + ), + campaignID, + ). + Delete(&database.CampaignRecipient{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// UpdateByID updates a campaign recipient by id +func (c *CampaignRecipient) UpdateByID( + ctx context.Context, + id *uuid.UUID, + campaignRecipient *model.CampaignRecipient, +) error { + row := campaignRecipient.ToDBMap() + AddUpdatedAt(row) + + res := c.DB. + Model(&database.CampaignRecipient{}). + Where( + fmt.Sprintf( + "%s = ?", TableColumnID(database.CAMPAIGN_RECIPIENT_TABLE_NAME), + ), + id.String(), + ). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// Anonymize adds an anonymized id to a campaign recipient +func (r *CampaignRecipient) Anonymize( + ctx context.Context, + recipientID *uuid.UUID, + anonymizedID *uuid.UUID, +) error { + row := map[string]interface{}{ + "anonymized_id": anonymizedID.String(), + } + AddUpdatedAt(row) + res := r.DB. + Model(&database.CampaignRecipient{}). + Where("recipient_id = ?", recipientID). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +func (r *CampaignRecipient) CancelInActiveCampaigns( + ctx context.Context, + recipientID *uuid.UUID, +) error { + row := map[string]any{ + "cancelled_at": utils.NowRFC3339UTC(), + } + AddUpdatedAt(row) + subSelect := r.DB.Table(database.CAMPAIGN_TABLE).Select("id") + subSelect = appendWhereCampaignIsActive(subSelect) + + res := r.DB. 
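+ // the sub-select limits the cancellation to recipients of campaigns that are
+ // currently active (started or self-managed, and not yet closed)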
+ Model(&database.CampaignRecipient{}). + Where("campaign_id IN (?)", subSelect). + Where("recipient_id = ?", recipientID). + Updates(row) + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveRecipientIDByCampaignID removes a recipient id from all campaign recipients +// related to a campaign, this is used when anonymizing a campaign +func (r *CampaignRecipient) RemoveRecipientIDByCampaignID( + ctx context.Context, + campaignID *uuid.UUID, +) error { + row := map[string]interface{}{ + "recipient_id": nil, + } + AddUpdatedAt(row) + res := r.DB. + Model(&database.CampaignRecipient{}). + Where("campaign_id = ?", campaignID). + Updates(row) + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveRecipientIDByRecipientID removes a recipient id from a campaign recipient +func (r *CampaignRecipient) RemoveRecipientIDByRecipientID( + ctx context.Context, + recipientID *uuid.UUID, +) error { + row := map[string]interface{}{ + "recipient_id": nil, + } + AddUpdatedAt(row) + res := r.DB. + Model(&database.CampaignRecipient{}). + Where("recipient_id = ?", recipientID). + Updates(row) + if res.Error != nil { + return res.Error + } + return nil +} + +// ToCampaignRecipient converts a database campaign recipient to a model campaign recipient +func ToCampaignRecipient(row *database.CampaignRecipient) (*model.CampaignRecipient, error) { + id := nullable.NewNullableWithValue(*row.ID) + var cancelledAt nullable.Nullable[time.Time] + cancelledAt.SetNull() + if row.CancelledAt != nil { + cancelledAt = nullable.NewNullableWithValue(*row.CancelledAt) + } + var sendAt nullable.Nullable[time.Time] + sendAt.SetNull() + if row.SendAt != nil { + sendAt = nullable.NewNullableWithValue(*row.SendAt) + } + var sentAt nullable.Nullable[time.Time] + sentAt.SetNull() + if row.SentAt != nil { + sentAt = nullable.NewNullableWithValue(*row.SentAt) + } + var lastAttemptAt nullable.Nullable[time.Time] + lastAttemptAt.SetNull() + if row.LastAttemptAt != nil { + lastAttemptAt = nullable.NewNullableWithValue(*row.LastAttemptAt) + } + selfManaged := nullable.NewNullableWithValue(row.SelfManaged) + campaignID := nullable.NewNullableWithValue(*row.CampaignID) + var recipientID nullable.Nullable[uuid.UUID] + recipientID.SetNull() + if row.RecipientID != nil { + recipientID = nullable.NewNullableWithValue(*row.RecipientID) + } + var anonymizedID nullable.Nullable[uuid.UUID] + anonymizedID.SetNull() + if row.AnonymizedID != nil { + anonymizedID = nullable.NewNullableWithValue(*row.AnonymizedID) + } + var recipient *model.Recipient + if row.Recipient != nil { + r, err := ToRecipient(row.Recipient) + if err != nil { + return nil, errs.Wrap(err) + } + recipient = r + } + var campaign *model.Campaign + if row.Campaign != nil { + campaign, _ = ToCampaign(row.Campaign) + } + var notableEventName string + var notableEventID nullable.Nullable[uuid.UUID] + notableEventID.SetNull() + if row.NotableEventID != nil { + notableEventID = nullable.NewNullableWithValue(*row.NotableEventID) + notableEventName = cache.EventNameByID[row.NotableEventID.String()] + } + return &model.CampaignRecipient{ + ID: id, + CancelledAt: cancelledAt, + SendAt: sendAt, + SentAt: sentAt, + LastAttemptAt: lastAttemptAt, + SelfManaged: selfManaged, + CampaignID: campaignID, + Campaign: campaign, + AnonymizedID: anonymizedID, + RecipientID: recipientID, + Recipient: recipient, + NotableEventID: notableEventID, + NotableEventName: notableEventName, + }, nil +} diff --git a/backend/repository/campaignTemplate.go 
b/backend/repository/campaignTemplate.go new file mode 100644 index 0000000..82ca04d --- /dev/null +++ b/backend/repository/campaignTemplate.go @@ -0,0 +1,810 @@ +package repository + +import ( + "context" + "fmt" + "strings" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowdCampaignTemplatesColumns = []string{ + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "created_at"), + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "updated_at"), + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "name"), + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "after_landing_page_redirect_url"), + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "is_usable"), + TableColumn(database.DOMAIN_TABLE, "name"), + TableColumn("before_landing_page", "name"), + TableColumn("landing_page", "name"), + TableColumn("after_landing_page", "name"), + TableColumn(database.EMAIL_TABLE, "name"), + TableColumn(database.SMTP_CONFIGURATION_TABLE, "name"), + TableColumn(database.API_SENDER_TABLE, "name"), +} + +type CampaignTemplateOption struct { + *vo.QueryArgs + Columns []string + + UsableOnly bool + + WithCompany bool + WithDomain bool + WithLandingPage bool + WithBeforeLandingPage bool + WithAfterLandingPage bool + WithEmail bool + WithSMTPConfiguration bool + WithAPISender bool + // url and cookie keys + WithIdentifier bool +} + +// CampaignTemplate is a campaign template repository +type CampaignTemplate struct { + DB *gorm.DB +} + +// load applies the preloading options +func (r CampaignTemplate) load(o *CampaignTemplateOption, db *gorm.DB) *gorm.DB { + if o == nil { + return db + } + if o.WithCompany { + db = db.Preload("Company") + } + if o.WithDomain { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOn(database.CAMPAIGN_TEMPLATE_TABLE, "domain_id", database.DOMAIN_TABLE, "id")) + } else { + db = db.Joins("Domain") + } + } + if o.WithLandingPage { + + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOnWithAlias( + database.CAMPAIGN_TEMPLATE_TABLE, + "landing_page_id", + database.PAGE_TABLE, + "id", + "landing_page", + )) + } else { + db = db.Preload("LandingPage") + } + } + if o.WithBeforeLandingPage { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOnWithAlias( + database.CAMPAIGN_TEMPLATE_TABLE, + "before_landing_page_id", + database.PAGE_TABLE, + "id", + "before_landing_page", + )) + } else { + db = db.Preload("BeforeLandingPage") + } + } + if o.WithAfterLandingPage { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOnWithAlias( + database.CAMPAIGN_TEMPLATE_TABLE, + "after_landing_page_id", + database.PAGE_TABLE, + "id", + "after_landing_page", + )) + } else { + db = db.Preload("AfterLandingPage") + } + } + if o.WithEmail { + if len(o.Columns) > 0 { + + db = db.Joins(LeftJoinOn(database.CAMPAIGN_TEMPLATE_TABLE, "email_id", database.EMAIL_TABLE, "id")) + } else { + db = db.Preload("Email") + + } + } + if o.WithSMTPConfiguration { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOn(database.CAMPAIGN_TEMPLATE_TABLE, "smtp_configuration_id", database.SMTP_CONFIGURATION_TABLE, "id")) + } else { + db = db.Preload("SMTPConfiguration") + + } + } + if o.WithAPISender { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOn(database.CAMPAIGN_TEMPLATE_TABLE, "api_sender_id", database.API_SENDER_TABLE, "id")) + } else { + db = db.Preload("APISender") + } + } + if o.WithIdentifier { + db = 
db.Preload("URLIdentifier") + db = db.Preload("StateIdentifier") + } + return db +} + +// Insert inserts a new campaign template +func (r *CampaignTemplate) Insert( + ctx context.Context, + campaignTemplate *model.CampaignTemplate, +) (*uuid.UUID, error) { + id := uuid.New() + row := campaignTemplate.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := r.DB. + Model(&database.CampaignTemplate{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetByID gets a campaign template by id +func (r *CampaignTemplate) GetByID( + ctx context.Context, + id *uuid.UUID, + options *CampaignTemplateOption, +) (*model.CampaignTemplate, error) { + db := r.load(options, r.DB) + var tmpl database.CampaignTemplate + res := db. + Where( + TableColumnID(database.CAMPAIGN_TEMPLATE_TABLE)+" = ?", + id.String(), + ). + First(&tmpl) + + if res.Error != nil { + return nil, res.Error + } + return ToCampaignTemplate(&tmpl) +} + +// GetByNameAndCompanyID gets a campaign template by name and company ID +func (r *CampaignTemplate) GetByNameAndCompanyID( + ctx context.Context, + name string, + companyID *uuid.UUID, + options *CampaignTemplateOption, +) (*model.CampaignTemplate, error) { + db := r.load(options, r.DB) + var tmpl database.CampaignTemplate + db = withCompanyIncludingNullContext(db, companyID, database.CAMPAIGN_TEMPLATE_TABLE) + res := db. + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "name"), + ), + name, + ). + First(&tmpl) + + if res.Error != nil { + return nil, res.Error + } + return ToCampaignTemplate(&tmpl) +} + +// GetAll gets all campaign templates +func (r *CampaignTemplate) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *CampaignTemplateOption, +) (*model.Result[model.CampaignTemplate], error) { + result := model.NewEmptyResult[model.CampaignTemplate]() + db := r.DB + if options.Columns != nil && len(options.Columns) > 0 { + db = db.Select(strings.Join(options.Columns, ",")) + } + db = r.load(options, db) + db = withCompanyIncludingNullContext(db, companyID, database.CAMPAIGN_TEMPLATE_TABLE) + db, err := useQuery(db, database.CAMPAIGN_TEMPLATE_TABLE, options.QueryArgs, allowdCampaignTemplatesColumns...) 
+ if err != nil { + return result, errs.Wrap(err) + } + if options.UsableOnly { + db.Where( + fmt.Sprintf("%s = ?", + TableColumn( + database.CAMPAIGN_TEMPLATE_TABLE, + "is_usable", + ), + ), + true, + ) + } + var tmpl []database.CampaignTemplate + res := db.Find(&tmpl) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_TEMPLATE_TABLE, + options.QueryArgs, + allowdCampaignTemplatesColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, t := range tmpl { + tmpl, err := ToCampaignTemplate(&t) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, tmpl) + } + return result, nil +} + +// GetAllByCompanyID gets all campaign templates by company id +func (r *CampaignTemplate) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *CampaignTemplateOption, +) (*model.Result[model.CampaignTemplate], error) { + result := model.NewEmptyResult[model.CampaignTemplate]() + db := r.DB + if options.Columns != nil && len(options.Columns) > 0 { + db = db.Select(strings.Join(options.Columns, ",")) + } + db = r.load(options, db) + db = whereCompany(db, database.CAMPAIGN_TEMPLATE_TABLE, companyID) + db, err := useQuery( + db, + database.CAMPAIGN_TEMPLATE_TABLE, + options.QueryArgs, + allowdCampaignTemplatesColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + if options.UsableOnly { + db.Where( + fmt.Sprintf("%s = ?", + TableColumn( + database.CAMPAIGN_TEMPLATE_TABLE, + "is_usable", + ), + ), + true, + ) + } + var tmpl []database.CampaignTemplate + res := db.Find(&tmpl) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_TEMPLATE_TABLE, + options.QueryArgs, + allowdCampaignTemplatesColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, t := range tmpl { + tmpl, err := ToCampaignTemplate(&t) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, tmpl) + } + return result, nil +} + +// GetBySmtpID gets all campaign templates by smtp configuration ID +// does not support Result based return +func (r *CampaignTemplate) GetBySmtpID( + ctx context.Context, + smtpID *uuid.UUID, + options *CampaignTemplateOption, +) ([]*model.CampaignTemplate, error) { + db := r.DB + if options.Columns != nil && len(options.Columns) > 0 { + db = db.Select(strings.Join(options.Columns, ",")) + } + db = r.load(options, db) + db, err := useQuery(db, database.CAMPAIGN_TEMPLATE_TABLE, options.QueryArgs, allowdCampaignTemplatesColumns...) 
+ if err != nil { + return nil, errs.Wrap(err) + } + db = db.Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "smtp_configuration_id"), + ), + smtpID.String(), + ) + if options.UsableOnly { + db.Where( + fmt.Sprintf("%s = ?", + TableColumn( + database.CAMPAIGN_TEMPLATE_TABLE, + "is_usable", + ), + ), + true, + ) + } + var tmpl []database.CampaignTemplate + res := db.Find(&tmpl) + + if res.Error != nil { + return nil, res.Error + } + templates := []*model.CampaignTemplate{} + for _, t := range tmpl { + tmpl, err := ToCampaignTemplate(&t) + if err != nil { + return nil, errs.Wrap(err) + } + templates = append(templates, tmpl) + } + return templates, nil +} + +// GetByAPISenderID gets all campaign templates by API sender ID +// does not support Result based return +func (r *CampaignTemplate) GetByAPISenderID( + ctx context.Context, + apiSenderID *uuid.UUID, + options *CampaignTemplateOption, +) ([]*model.CampaignTemplate, error) { + db := r.DB + if options.Columns != nil && len(options.Columns) > 0 { + db = db.Select(strings.Join(options.Columns, ",")) + } + db = r.load(options, db) + db, err := useQuery(db, database.CAMPAIGN_TEMPLATE_TABLE, options.QueryArgs, allowdCampaignTemplatesColumns...) + if err != nil { + return nil, errs.Wrap(err) + } + db = db.Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "api_sender_id"), + ), + apiSenderID.String(), + ) + if options.UsableOnly { + db.Where( + fmt.Sprintf("%s = ?", + TableColumn( + database.CAMPAIGN_TEMPLATE_TABLE, + "is_usable", + ), + ), + true, + ) + } + var tmpl []database.CampaignTemplate + res := db.Find(&tmpl) + + if res.Error != nil { + return nil, res.Error + } + templates := []*model.CampaignTemplate{} + for _, t := range tmpl { + tmpl, err := ToCampaignTemplate(&t) + if err != nil { + return nil, errs.Wrap(err) + } + templates = append(templates, tmpl) + } + return templates, nil +} + +// GetByDomainID gets all campaign templates by domain ID +func (r *CampaignTemplate) GetByDomainID( + ctx context.Context, + domainID *uuid.UUID, + options *CampaignTemplateOption, +) ([]*model.CampaignTemplate, error) { + db := r.DB + if options.Columns != nil && len(options.Columns) > 0 { + db = db.Select(strings.Join(options.Columns, ",")) + } + db = r.load(options, db) + db, err := useQuery(db, database.CAMPAIGN_TEMPLATE_TABLE, options.QueryArgs, allowdCampaignTemplatesColumns...) + if err != nil { + return nil, errs.Wrap(err) + } + db = db.Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "domain_id"), + ), + domainID.String(), + ) + if options.UsableOnly { + db.Where( + fmt.Sprintf("%s = ?", + TableColumn( + database.CAMPAIGN_TEMPLATE_TABLE, + "is_usable", + ), + ), + true, + ) + } + var tmpl []database.CampaignTemplate + res := db.Find(&tmpl) + + if res.Error != nil { + return nil, res.Error + } + templates := []*model.CampaignTemplate{} + for _, t := range tmpl { + tmpl, err := ToCampaignTemplate(&t) + if err != nil { + return nil, errs.Wrap(err) + } + templates = append(templates, tmpl) + } + return templates, nil +} + +// GetByPageID gets all campaign templates that uses a page ID +// in before, landing or after page. 
+func (r *CampaignTemplate) GetByPageID( + ctx context.Context, + pageID *uuid.UUID, + options *CampaignTemplateOption, +) ([]*model.CampaignTemplate, error) { + db := r.DB + if options.Columns != nil && len(options.Columns) > 0 { + db = db.Select(strings.Join(options.Columns, ",")) + } + db = r.load(options, db) + db, err := useQuery(db, database.CAMPAIGN_TEMPLATE_TABLE, options.QueryArgs, allowdCampaignTemplatesColumns...) + if err != nil { + return nil, errs.Wrap(err) + } + db = db.Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "landing_page_id"), + ), + pageID.String(), + ).Or( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "before_landing_page_id"), + ), + pageID.String(), + ).Or( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "after_landing_page_id"), + ), + pageID.String(), + ) + if options.UsableOnly { + db.Where( + fmt.Sprintf("%s = ?", + TableColumn( + database.CAMPAIGN_TEMPLATE_TABLE, + "is_usable", + ), + ), + true, + ) + } + var tmpl []database.CampaignTemplate + res := db.Find(&tmpl) + + if res.Error != nil { + return nil, res.Error + } + templates := []*model.CampaignTemplate{} + for _, t := range tmpl { + tmpl, err := ToCampaignTemplate(&t) + if err != nil { + return nil, errs.Wrap(err) + } + templates = append(templates, tmpl) + } + return templates, nil +} + +// RemoveDomainIDFromAll removes the domain ID from all templates by domain ID +func (r *CampaignTemplate) RemoveDomainIDFromAll( + ctx context.Context, + domainID *uuid.UUID, +) error { + row := map[string]any{} + AddUpdatedAt(row) + row["domain_id"] = nil + row["is_usable"] = false + res := r.DB. + Model(&database.CampaignTemplate{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "domain_id"), + ), + domainID.String(), + ). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveAPISenderIDFromAll removes the smtp configuration ID from all templates by smtp configuration ID +func (r *CampaignTemplate) RemoveAPISenderIDFromAll( + ctx context.Context, + domainID *uuid.UUID, +) error { + row := map[string]any{} + AddUpdatedAt(row) + row["api_sender_id"] = nil + row["is_usable"] = false + res := r.DB. + Model(&database.CampaignTemplate{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "api_sender_id"), + ), + domainID.String(), + ). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveSmtpIDFromAll removes the smtp configuration ID from all templates by smtp configuration ID +func (r *CampaignTemplate) RemoveSmtpIDFromAll( + ctx context.Context, + domainID *uuid.UUID, +) error { + row := map[string]any{} + AddUpdatedAt(row) + row["smtp_configuration_id"] = nil + row["is_usable"] = false + res := r.DB. + Model(&database.CampaignTemplate{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "smtp_configuration_id"), + ), + domainID.String(), + ). 
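+	// note: the id matched against smtp_configuration_id above is the smtp configuration id,
+	// even though the parameter is named domainID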
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemovePageIDFromAll removes the page ID from any matching columns +// landing_page_id, before_landing_page_id and after_landing_page_id +func (r *CampaignTemplate) RemovePageIDFromAll( + ctx context.Context, + pageID *uuid.UUID, +) error { + columns := []string{"before_landing_page_id", "after_landing_page_id", "landing_page_id"} + for _, column := range columns { + row := map[string]any{} + AddUpdatedAt(row) + row[column] = nil + row["is_usable"] = false + res := r.DB. + Model(&database.CampaignTemplate{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, column), + ), + pageID.String(), + ). + Updates(row) + + if res.Error != nil { + return res.Error + } + } + return nil +} + +// UpdateByID updates a campaign template by id +func (r *CampaignTemplate) UpdateByID( + ctx context.Context, + id *uuid.UUID, + campaignTemplate *model.CampaignTemplate, +) error { + row := campaignTemplate.ToDBMap() + AddUpdatedAt(row) + res := r.DB. + Model(&database.CampaignTemplate{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumnID(database.CAMPAIGN_TEMPLATE_TABLE), + ), + id.String(), + ). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a campaign template by id +func (r *CampaignTemplate) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + res := r.DB. + Where("id = ?", id). + Delete(&database.CampaignTemplate{}) + if res.Error != nil { + return res.Error + } + return nil +} + +func ToCampaignTemplate(row *database.CampaignTemplate) (*model.CampaignTemplate, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + domainID := nullable.NewNullNullable[uuid.UUID]() + if row.DomainID != nil { + domainID.Set(*row.DomainID) + } + var domain *model.Domain + if row.Domain != nil { + domain = ToDomain(row.Domain) + } + var beforeLandingPage *model.Page + if row.BeforeLandingPage != nil { + p, err := ToPage(row.BeforeLandingPage) + if err != nil { + return nil, errs.Wrap(err) + } + beforeLandingPage = p + } + beforeLandingPageID := nullable.NewNullNullable[uuid.UUID]() + if row.BeforeLandingPageID != nil { + beforeLandingPageID.Set(*row.BeforeLandingPageID) + } + var landingPage *model.Page + if row.LandingPage != nil { + p, err := ToPage(row.LandingPage) + if err != nil { + return nil, errs.Wrap(err) + } + landingPage = p + } + landingPageID := nullable.NewNullNullable[uuid.UUID]() + if row.LandingPageID != nil { + landingPageID.Set(*row.LandingPageID) + } + var afterLandingPage *model.Page + if row.AfterLandingPage != nil { + p, err := ToPage(row.AfterLandingPage) + if err != nil { + return nil, errs.Wrap(err) + } + afterLandingPage = p + } + afterLandingPageID := nullable.NewNullNullable[uuid.UUID]() + if row.AfterLandingPageID != nil { + afterLandingPageID.Set(*row.AfterLandingPageID) + } + redirectURL := nullable.NewNullableWithValue(*vo.NewOptionalString255Must("")) + if row.AfterLandingPageRedirectURL != "" { + redirectURL.Set(*vo.NewOptionalString255Must(row.AfterLandingPageRedirectURL)) + } + emailID := nullable.NewNullNullable[uuid.UUID]() + if row.EmailID != nil { + emailID.Set(*row.EmailID) + } + var email *model.Email + if row.Email != nil { + email = ToEmail(row.Email) + } + smtpConfigurationID := 
nullable.NewNullNullable[uuid.UUID]() + if row.SMTPConfigurationID != nil { + smtpConfigurationID.Set(*row.SMTPConfigurationID) + } + var smtpConfiguration *model.SMTPConfiguration + if row.SMTPConfiguration != nil { + smtpConfiguration = ToSMTPConfiguration(row.SMTPConfiguration) + } + apiSenderID := nullable.NewNullNullable[uuid.UUID]() + if row.APISenderID != nil { + apiSenderID.Set(*row.APISenderID) + } + var apiSender *model.APISender + if row.APISender != nil { + var err error + apiSender, err = ToAPISender(row.APISender) + if err != nil { + return nil, errs.Wrap(err) + } + } + urlIdentifierID := nullable.NewNullableWithValue(row.URLIdentifierID) + var urlIdentifier *model.Identifier + if row.URLIdentifier != nil { + urlIdentifier = ToIdentifier(row.URLIdentifier) + } + stateIdentifierID := nullable.NewNullableWithValue(row.StateIdentifierID) + var stateIdentifier *model.Identifier + if row.StateIdentifier != nil { + stateIdentifier = ToIdentifier(row.StateIdentifier) + } + urlPath := nullable.NewNullableWithValue(*vo.NewURLPathMust(row.URLPath)) + + isUsable := nullable.NewNullableWithValue(row.IsUsable) + + return &model.CampaignTemplate{ + ID: id, + CompanyID: companyID, + Name: name, + DomainID: domainID, + Domain: domain, + BeforeLandingPageID: beforeLandingPageID, + BeforeLandingePage: beforeLandingPage, + LandingPageID: landingPageID, + LandingPage: landingPage, + AfterLandingPageID: afterLandingPageID, + AfterLandingPage: afterLandingPage, + AfterLandingPageRedirectURL: redirectURL, + EmailID: emailID, + Email: email, + SMTPConfigurationID: smtpConfigurationID, + SMTPConfiguration: smtpConfiguration, + APISenderID: apiSenderID, + APISender: apiSender, + URLIdentifierID: urlIdentifierID, + URLIdentifier: urlIdentifier, + StateIdentifierID: stateIdentifierID, + StateIdentifier: stateIdentifier, + URLPath: urlPath, + IsUsable: isUsable, + }, nil +} diff --git a/backend/repository/company.go b/backend/repository/company.go new file mode 100644 index 0000000..2a65d1f --- /dev/null +++ b/backend/repository/company.go @@ -0,0 +1,156 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var companyAllowedColumns = assignTableToColumns(database.COMPANY_TABLE, []string{ + "created_at", + "updated_at", + "name", +}) + +// Company is a Company repository +type Company struct { + DB *gorm.DB +} + +// Insert inserts a new company +func (r *Company) Insert( + ctx context.Context, + company *model.Company, +) (*uuid.UUID, error) { + id := uuid.New() + row := company.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := r.DB. + Model(&database.Company{}). + Create(&row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetByName gets a company by name +func (r *Company) GetByName( + ctx context.Context, + name string, +) (*model.Company, error) { + var dbCompany database.Company + res := r.DB. + Where("name = ?", name). 
+ First(&dbCompany) + + if res.Error != nil { + return nil, res.Error + } + return ToCompany(&dbCompany), nil +} + +// GetAll gets all companies with pagination +func (r *Company) GetAll( + ctx context.Context, + queryArgs *vo.QueryArgs, +) (*model.Result[model.Company], error) { + result := model.NewEmptyResult[model.Company]() + var dbCompanies []database.Company + db, err := useQuery(r.DB, database.COMPANY_TABLE, queryArgs, companyAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db. + Find(&dbCompanies) + + if dbRes.Error != nil { + return nil, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.COMPANY_TABLE, queryArgs, companyAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbCompany := range dbCompanies { + result.Rows = append(result.Rows, ToCompany(&dbCompany)) + } + return result, nil +} + +// GetByID gets a company by id +func (r *Company) GetByID( + ctx context.Context, + id *uuid.UUID, +) (*model.Company, error) { + var dbCompany database.Company + result := r.DB. + Where( + fmt.Sprintf("%s = ?", TableColumnID(database.COMPANY_TABLE)), + id.String(), + ). + First(&dbCompany) + + if result.Error != nil { + return nil, result.Error + } + return ToCompany(&dbCompany), nil +} + +// UpdateByID updates a company by id +func (r *Company) UpdateByID( + ctx context.Context, + id *uuid.UUID, + company *model.Company, +) error { + row := company.ToDBMap() + AddUpdatedAt(row) + res := r.DB. + Model(&database.Company{}). + Where( + fmt.Sprintf("%s = ?", TableColumnID(database.COMPANY_TABLE)), + id.String(), + ). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a company +// returns the number of rows affected and an error +func (r *Company) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) (int, error) { + result := r.DB.Delete(&database.Company{ID: *id}) + if result.Error != nil { + return 0, result.Error + } + return int(result.RowsAffected), nil +} + +func ToCompany(row *database.Company) *model.Company { + id := nullable.NewNullableWithValue(row.ID) + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + return &model.Company{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + } +} diff --git a/backend/repository/consts.go b/backend/repository/consts.go new file mode 100644 index 0000000..4887dbb --- /dev/null +++ b/backend/repository/consts.go @@ -0,0 +1,5 @@ +package repository + +const ( + NO_LIMIT = -1 +) diff --git a/backend/repository/domain.go b/backend/repository/domain.go new file mode 100644 index 0000000..4166fb8 --- /dev/null +++ b/backend/repository/domain.go @@ -0,0 +1,302 @@ +package repository + +import ( + "context" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var domainAllowedColumns = assignTableToColumns(database.DOMAIN_TABLE, []string{ + "created_at", + "updated_at", + "name", + "redirect_url", + "host_website", +}) + +// DomainOption is for deciding if we should load full domain entities +type DomainOption struct { + *vo.QueryArgs + WithCompany bool +} + +// Domain is a Domain repository +type Domain struct { + DB *gorm.DB +} + +// load loads the table relations +func (r *Domain) load(db *gorm.DB) *gorm.DB { + 
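+	// the owning Company is joined so it is returned together with the domain in a single query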
return db.Joins("Company") +} + +// Insert inserts a new domain +func (r *Domain) Insert( + ctx context.Context, + domain *model.Domain, +) (*uuid.UUID, error) { + id := uuid.New() + row := domain.ToDBMap() + row["id"] = id + AddTimestamps(row) + + result := r.DB. + Model(&database.Domain{}). + Create(row) + if result.Error != nil { + return nil, result.Error + } + return &id, nil +} + +// GetAll gets domains +func (r *Domain) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *DomainOption, +) (*model.Result[model.Domain], error) { + result := model.NewEmptyResult[model.Domain]() + var dbDomains []database.Domain + db := r.DB + if options.WithCompany { + db = r.load(db) + } + db = withCompanyIncludingNullContext(db, companyID, database.DOMAIN_TABLE) + db, err := useQuery(db, database.DOMAIN_TABLE, options.QueryArgs, domainAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db.Find(&dbDomains) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.DOMAIN_TABLE, + options.QueryArgs, + domainAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbDomain := range dbDomains { + result.Rows = append(result.Rows, ToDomain(&dbDomain)) + } + return result, nil +} + +// GetAllByCompanyID gets domains by company ID +func (r *Domain) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *DomainOption, +) (*model.Result[model.Domain], error) { + result := model.NewEmptyResult[model.Domain]() + var dbDomains []database.Domain + db := r.DB + if options.WithCompany { + db = r.load(db) + } + db = whereCompany(db, database.DOMAIN_TABLE, companyID) + db, err := useQuery(db, database.DOMAIN_TABLE, options.QueryArgs, domainAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db.Find(&dbDomains) + + hasNextPage, err := useHasNextPage( + db, + database.DOMAIN_TABLE, + options.QueryArgs, + domainAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + if dbRes.Error != nil { + return result, dbRes.Error + } + for _, dbDomain := range dbDomains { + result.Rows = append(result.Rows, ToDomain(&dbDomain)) + } + return result, nil +} + +// GetAllSubset gets a subset of domains +// options only support sorting and searching +func (r *Domain) GetAllSubset( + ctx context.Context, + companyID *uuid.UUID, + options *DomainOption, +) (*model.Result[model.DomainOverview], error) { + result := model.NewEmptyResult[model.DomainOverview]() + db := withCompanyIncludingNullContext(r.DB, companyID, database.DOMAIN_TABLE) + db, err := useQuery(db, database.DOMAIN_TABLE, options.QueryArgs, domainAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + + var dbDomains []database.Domain + res := db. + Omit( + TableColumn(database.DOMAIN_TABLE, "page_content"), + TableColumn(database.DOMAIN_TABLE, "page_not_found_content"), + ). + Find(&dbDomains) + + if res.Error != nil { + return nil, res.Error + } + + hasNextPage, err := useHasNextPage(db, database.DOMAIN_TABLE, options.QueryArgs, domainAllowedColumns...) 
+ if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbDomain := range dbDomains { + result.Rows = append(result.Rows, ToDomainSubset(&dbDomain)) + } + return result, nil +} + +// GetByID gets a domain by id +func (r *Domain) GetByID( + ctx context.Context, + id *uuid.UUID, + options *DomainOption, +) (*model.Domain, error) { + dbDomain := &database.Domain{} + db := r.DB + if options.WithCompany { + db = r.load(db) + } + result := db. + Model(&database.Domain{}). + Where(TableColumnID(database.DOMAIN_TABLE)+" = ?", id.String()). + First(&dbDomain) + if result.Error != nil { + return nil, result.Error + } + return ToDomain(dbDomain), nil +} + +// GetByName gets a domain by name +func (r *Domain) GetByName( + ctx context.Context, + name *vo.String255, + options *DomainOption, +) (*model.Domain, error) { + db := r.DB + dbDomain := &database.Domain{} + if options.WithCompany { + db = r.load(db) + } + result := db. + Where( + TableColumnName(database.DOMAIN_TABLE)+" = ?", name.String(), + ). + First(&dbDomain) + + if result.Error != nil { + return nil, result.Error + } + return ToDomain(dbDomain), nil +} + +// UpdateByID updates a domain by id +func (r *Domain) UpdateByID( + ctx context.Context, + domain *model.Domain, +) error { + row := domain.ToDBMap() + AddUpdatedAt(row) + res := r.DB. + Model(&database.Domain{}). + Where( + TableColumnID(database.DOMAIN_TABLE)+" = ?", domain.ID.MustGet()). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a domain by id +func (r *Domain) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + result := r.DB. + Where( + TableColumnID(database.DOMAIN_TABLE)+" = ?", id.String()). + Delete(&database.Domain{}) + + if result.Error != nil { + return result.Error + } + return nil +} + +// ToDomain converts a domain db row to model +func ToDomain(row *database.Domain) *model.Domain { + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + var company *model.Company + if row.Company != nil { + company = ToCompany(row.Company) + } + id := nullable.NewNullableWithValue(row.ID) + name := nullable.NewNullableWithValue(*vo.NewString255Must(row.Name)) + managedTLS := nullable.NewNullableWithValue(row.ManagedTLSCerts) + ownManagedTLS := nullable.NewNullableWithValue(row.OwnManagedTLS) + hostWebsite := nullable.NewNullableWithValue(row.HostWebsite) + staticPage := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(row.PageContent)) + staticNotFound := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(row.PageNotFoundContent)) + redirectURL := nullable.NewNullableWithValue(*vo.NewOptionalString1024Must(row.RedirectURL)) + + return &model.Domain{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + ManagedTLS: managedTLS, + OwnManagedTLS: ownManagedTLS, + HostWebsite: hostWebsite, + PageContent: staticPage, + PageNotFoundContent: staticNotFound, + RedirectURL: redirectURL, + CompanyID: companyID, + Company: company, + } +} + +// ToDomainSubset converts a domain subset from db row to model +func ToDomainSubset(dbDomain *database.Domain) *model.DomainOverview { + return &model.DomainOverview{ + ID: dbDomain.ID, + CreatedAt: dbDomain.CreatedAt, + UpdatedAt: dbDomain.UpdatedAt, + Name: dbDomain.Name, + HostWebsite: dbDomain.HostWebsite, + ManagedTLS: dbDomain.ManagedTLSCerts, + OwnManagedTLS: dbDomain.OwnManagedTLS, + RedirectURL: dbDomain.RedirectURL, + 
CompanyID: dbDomain.CompanyID, + } +} diff --git a/backend/repository/email.go b/backend/repository/email.go new file mode 100644 index 0000000..f7449c8 --- /dev/null +++ b/backend/repository/email.go @@ -0,0 +1,369 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowedEmailOrderBy = assignTableToColumns(database.EMAIL_TABLE, []string{ + "created_at", + "updated_at", + "name", + "mail_from", // envelope from + "from", + "subject", + "add_tracking_pixel", +}) + +// EmailOption is for deciding if we should load full email entities +type EmailOption struct { + *vo.QueryArgs + WithCompany bool + WithAttachments bool +} + +// Email is a Email repository +type Email struct { + DB *gorm.DB +} + +// load preloads the table relations +func (m *Email) load( + db *gorm.DB, + options *EmailOption, +) *gorm.DB { + if options.WithCompany { + db = db.Preload("Company") + } + if options.WithAttachments { + db = db.Preload("Attachments") + } + return db +} + +// AddAttachment adds an attachment to a email +func (m *Email) AddAttachment( + ctx context.Context, + emailID *uuid.UUID, + attachmentID *uuid.UUID, +) error { + result := m.DB.Create( + &database.EmailAttachment{ + EmailID: emailID, + AttachmentID: attachmentID, + }, + ) + if result.Error != nil { + return result.Error + } + return nil +} + +// RemoveAttachment removes an attachment from a email +func (m *Email) RemoveAttachment( + ctx context.Context, + emailID *uuid.UUID, + attachmentID *uuid.UUID, +) error { + result := m.DB.Delete( + &database.EmailAttachment{}, + "email_id = ? AND attachment_id = ?", + emailID, + attachmentID, + ) + if result.Error != nil { + return result.Error + } + return nil +} + +// RemoveAttachment removes an attachments from a email by attachment ID +func (m *Email) RemoveAttachmentsByAttachmentID( + ctx context.Context, + attachmentID *uuid.UUID, +) error { + result := m.DB.Delete( + &database.EmailAttachment{}, + "attachment_id = ?", + attachmentID, + ) + if result.Error != nil { + return result.Error + } + return nil +} + +// Insert inserts a new email +func (m *Email) Insert( + ctx context.Context, + email *model.Email, +) (*uuid.UUID, error) { + id := uuid.New() + row := email.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := m.DB. + Model(&database.Email{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetByID gets a email by ID +func (m *Email) GetByID( + ctx context.Context, + id *uuid.UUID, + options *EmailOption, +) (*model.Email, error) { + dbEmail := database.Email{} + db := m.load(m.DB, options) + result := db. + Where("id = ?", id). + First(&dbEmail) + if result.Error != nil { + return nil, result.Error + } + return ToEmail(&dbEmail), nil +} + +// GetAll gets all emails +func (m *Email) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *EmailOption, +) (*model.Result[model.Email], error) { + result := model.NewEmptyResult[model.Email]() + dbEmails := []database.Email{} + db := m.load(m.DB, options) + db = withCompanyIncludingNullContext(db, companyID, database.EMAIL_TABLE) + db, err := useQuery(db, database.EMAIL_TABLE, options.QueryArgs, allowedEmailOrderBy...) 
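+	// the company scope includes shared emails where company_id is NULL as well as the
+	// company's own emails (see withCompanyIncludingNullContext)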
+ if err != nil { + return result, errs.Wrap(err) + } + dbRes := db.Find(&dbEmails) + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.EMAIL_TABLE, options.QueryArgs, allowedEmailOrderBy...) + + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbEmail := range dbEmails { + em := ToEmail(&dbEmail) + result.Rows = append(result.Rows, em) + } + return result, nil +} + +// GetAllByCompanyID gets all emails by company id +func (m *Email) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *EmailOption, +) (*model.Result[model.Email], error) { + result := model.NewEmptyResult[model.Email]() + dbEmails := []database.Email{} + db := m.load(m.DB, options) + db = whereCompany(db, database.EMAIL_TABLE, companyID) + db, err := useQuery(db, database.EMAIL_TABLE, options.QueryArgs, allowedEmailOrderBy...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db.Find(&dbEmails) + if dbRes.Error != nil { + return nil, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.EMAIL_TABLE, options.QueryArgs, allowedEmailOrderBy...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbEmail := range dbEmails { + em := ToEmail(&dbEmail) + result.Rows = append(result.Rows, em) + } + return result, nil +} + +// GetOverviews gets all emails but without content +func (m *Email) GetOverviews( + ctx context.Context, + companyID *uuid.UUID, + options *EmailOption, +) (*model.Result[model.Email], error) { + result := model.NewEmptyResult[model.Email]() + dbEmails := []database.Email{} + db := m.load(m.DB, options) + db = withCompanyIncludingNullContext(db, companyID, database.EMAIL_TABLE) + db, err := useQuery(db, database.EMAIL_TABLE, options.QueryArgs, allowedEmailOrderBy...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db. + Omit( + TableColumn(database.EMAIL_TABLE, "content"), + ). + Find(&dbEmails) + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.EMAIL_TABLE, options.QueryArgs, allowedEmailOrderBy...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbEmail := range dbEmails { + em := ToEmail(&dbEmail) + result.Rows = append(result.Rows, em) + } + return result, nil +} + +// GetByNameAndCompanyID gets a email by name +func (m *Email) GetByNameAndCompanyID( + ctx context.Context, + name *vo.String64, + companyID *uuid.UUID, // can be null + options *EmailOption, +) (*model.Email, error) { + dbEmail := database.Email{} + db := m.load(m.DB, options) + var result *gorm.DB + if companyID == nil { + result = db. + Where( + fmt.Sprintf( + "%s = ? AND %s IS NULL", + TableColumn(database.EMAIL_TABLE, "name"), + TableColumn(database.EMAIL_TABLE, "company_id"), + ), + name.String(), + ). + First(&dbEmail) + } else { + result = db. + Where( + fmt.Sprintf( + "%s = ? AND %s = ?", + TableColumn(database.EMAIL_TABLE, "name"), + TableColumn(database.EMAIL_TABLE, "company_id"), + ), + name.String(), + companyID.String(), + ). + First(&dbEmail) + } + if result.Error != nil { + return nil, result.Error + } + return ToEmail(&dbEmail), nil +} + +// UpdateByID updates a email by ID +func (m *Email) UpdateByID( + ctx context.Context, + id *uuid.UUID, + email *model.Email, +) error { + row := email.ToDBMap() + AddUpdatedAt(row) + res := m.DB. + Model(&database.Email{}). + Where("id = ?", id). 
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a email by ID +func (m *Email) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + result := m.DB. + Delete(&database.Email{}, id) + if result.Error != nil { + return result.Error + } + return nil +} + +func ToEmail(row *database.Email) *model.Email { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + subject := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(row.Subject)) + envelopeFrom := nullable.NewNullableWithValue(*vo.NewMailEnvelopeFromMust(row.MailFrom)) + from := nullable.NewNullableWithValue(*vo.NewEmailMust(row.From)) + content := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(row.Content)) + addTrackingPixel := nullable.NewNullableWithValue(row.AddTrackingPixel) + + attachments := []*model.Attachment{} + for _, attachment := range row.Attachments { + attachments = append(attachments, ToAttachment(attachment)) + } + return &model.Email{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + MailHeaderSubject: subject, + MailEnvelopeFrom: envelopeFrom, + MailHeaderFrom: from, + Content: content, + AddTrackingPixel: addTrackingPixel, + CompanyID: companyID, + Attachments: attachments, + } +} + +func ToEmailOverview(row *database.Email) *model.EmailOverview { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + subject := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(row.Subject)) + envelopeFrom := nullable.NewNullableWithValue(*vo.NewMailEnvelopeFromMust(row.MailFrom)) + from := nullable.NewNullableWithValue(*vo.NewEmailMust(row.From)) + addTrackingPixel := nullable.NewNullableWithValue(row.AddTrackingPixel) + + return &model.EmailOverview{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + MailHeaderSubject: subject, + MailEnvelopeFrom: envelopeFrom, + MailHeaderFrom: from, + AddTrackingPixel: addTrackingPixel, + CompanyID: companyID, + } +} diff --git a/backend/repository/identifier.go b/backend/repository/identifier.go new file mode 100644 index 0000000..0219675 --- /dev/null +++ b/backend/repository/identifier.go @@ -0,0 +1,106 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var identifierAllowedColumns = assignTableToColumns(database.IDENTIFIER_TABLE, []string{ + TableColumn(database.IDENTIFIER_TABLE, "name"), +}) + +// IdentifierOption is options for loading +type IdentifierOption struct { + *vo.QueryArgs +} + +type Identifier struct { + DB *gorm.DB +} + +func (i *Identifier) Insert( + ctx context.Context, + identifier *model.Identifier, +) (*uuid.UUID, error) { + id := uuid.New() + row := identifier.ToDBMap() + row["id"] = id + // AddTimestamps(row) + + res := i.DB. + Model(&database.Identifier{}). 
+ Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +func (i *Identifier) GetByName( + ctx context.Context, + name string, +) (*model.Identifier, error) { + var row database.Identifier + res := i.DB. + Model(&database.Identifier{}). + Where( + fmt.Sprintf("%s = ?", TableColumnName(database.IDENTIFIER_TABLE)), + name, + ). + First(&row) + + if res.Error != nil { + return nil, res.Error + } + return ToIdentifier(&row), nil +} + +func (i *Identifier) GetAll( + ctx context.Context, + option *IdentifierOption, +) (*model.Result[model.Identifier], error) { + result := model.NewEmptyResult[model.Identifier]() + rows := []database.Identifier{} + db, err := useQuery(i.DB, database.IDENTIFIER_TABLE, option.QueryArgs, identifierAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + res := db. + Model(&database.Identifier{}). + Find(&rows) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage(db, database.IDENTIFIER_TABLE, option.QueryArgs, identifierAllowedColumns...) + + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, row := range rows { + result.Rows = append(result.Rows, ToIdentifier(&row)) + } + return result, nil +} + +func ToIdentifier(row *database.Identifier) *model.Identifier { + id := nullable.NewNullableWithValue(row.ID) + name := nullable.NewNullableWithValue(row.Name) + + return &model.Identifier{ + ID: id, + Name: name, + } +} diff --git a/backend/repository/option.go b/backend/repository/option.go new file mode 100644 index 0000000..2889bcb --- /dev/null +++ b/backend/repository/option.go @@ -0,0 +1,122 @@ +package repository + +import ( + "context" + + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// Option is a repository for Option +type Option struct { + DB *gorm.DB +} + +// GetByKey gets an option by key +func (o *Option) GetByKey( + ctx context.Context, + key string, +) (*model.Option, error) { + var option database.Option + result := o.DB.Where("key = ?", key).First(&option) + if result.Error != nil { + return nil, errors.Wrap(result.Error, 0) + } + return ToOption(&option) +} + +// updateByKey updates an option by key +func (o *Option) updateByKey( + ctx context.Context, + d *gorm.DB, + option *model.Option, +) error { + result := o.DB. + Model(&database.Option{}). + Where("key = ?", option.Key.String()). 
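+		// note: the update always runs on o.DB; the passed in handle d is not used here,
+		// so UpdateByKeyWithTransaction currently executes outside the supplied transaction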
+ Update("value", option.Value.String()) + + if result.Error != nil { + return errors.Wrap(result.Error, 0) + } + return nil +} + +// UpdateByKey updates an option by key +func (o *Option) UpdateByKey( + ctx context.Context, + option *model.Option, +) error { + return o.updateByKey( + ctx, + o.DB, + option, + ) +} + +// UpdateByKeyWithTransaction updates an option by key with transaction +func (o *Option) UpdateByKeyWithTransaction( + ctx context.Context, + tx *gorm.DB, + option *model.Option, +) error { + return o.updateByKey(ctx, tx, option) +} + +// insert creates an option from an option without id +func (o *Option) insert( + ctx context.Context, + d *gorm.DB, + opt *model.Option, +) (*uuid.UUID, error) { + id := uuid.New() + res := d.Create(database.Option{ + ID: &id, + Key: opt.Key.String(), + Value: opt.Value.String(), + }) + if res.Error != nil { + return nil, errors.Wrap(res.Error, 0) + } + return &id, nil +} + +// Insert creates an option +func (o *Option) Insert( + ctx context.Context, + opt *model.Option, +) (*uuid.UUID, error) { + return o.insert(ctx, o.DB, opt) +} + +// InsertWithTransaction creates an option using an transaction +func (o *Option) InsertWithTransaction( + ctx context.Context, + tx *gorm.DB, + opt *model.Option, +) (*uuid.UUID, error) { + return o.insert(ctx, tx, opt) +} + +func ToOption(dbModel *database.Option) (*model.Option, error) { + id := nullable.NewNullableWithValue(*dbModel.ID) + key, err := vo.NewString64(dbModel.Key) + if err != nil { + return nil, errs.Wrap(err) + } + value, err := vo.NewOptionalString1MB(dbModel.Value) + if err != nil { + return nil, errs.Wrap(err) + } + return &model.Option{ + ID: id, + Key: *key, + Value: *value, + }, nil +} diff --git a/backend/repository/page.go b/backend/repository/page.go new file mode 100644 index 0000000..11c45ca --- /dev/null +++ b/backend/repository/page.go @@ -0,0 +1,265 @@ +package repository + +import ( + "context" + "fmt" + "strings" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var pageAllowedColumns = assignTableToColumns(database.PAGE_TABLE, []string{ + "created_at", + "updated_at", + "name", +}) + +// PageOption is for eager loading +type PageOption struct { + Fields []string + *vo.QueryArgs + WithCompany bool +} + +// Page is a Page repository +type Page struct { + DB *gorm.DB +} + +// load preloads the table relations +func (pa *Page) load( + options *PageOption, + db *gorm.DB, +) *gorm.DB { + if options.WithCompany { + db = db.Joins("Company") + } + return db +} + +// Insert inserts a page +func (pa *Page) Insert( + ctx context.Context, + page *model.Page, +) (*uuid.UUID, error) { + id := uuid.New() + row := page.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := pa.DB. + Model(&database.Page{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetAll gets pages +func (pa *Page) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *PageOption, +) (*model.Result[model.Page], error) { + result := model.NewEmptyResult[model.Page]() + var dbPages []database.Page + db := pa.load(options, pa.DB) + db = withCompanyIncludingNullContext(db, companyID, database.PAGE_TABLE) + db, err := useQuery(db, database.PAGE_TABLE, options.QueryArgs, pageAllowedColumns...) 
+ if err != nil { + return result, errs.Wrap(err) + } + if options.Fields != nil { + // TODO potential issue with inner join selects + fields := assignTableToColumns(database.PAGE_TABLE, options.Fields) + db = db.Select(strings.Join(fields, ",")) + } + dbRes := db. + Find(&dbPages) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.PAGE_TABLE, options.QueryArgs, pageAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbPage := range dbPages { + page, err := ToPage(&dbPage) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, page) + } + return result, nil +} + +// GetAllByCompanyID gets pages by company id +func (pa *Page) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *PageOption, +) (*model.Result[model.Page], error) { + result := model.NewEmptyResult[model.Page]() + var dbPages []database.Page + db := pa.load(options, pa.DB) + db = whereCompany(db, database.PAGE_TABLE, companyID) + db, err := useQuery(db, database.PAGE_TABLE, options.QueryArgs, pageAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + if options.Fields != nil { + // TODO potential issue with inner join selects + fields := assignTableToColumns(database.PAGE_TABLE, options.Fields) + db = db.Select(strings.Join(fields, ",")) + } + dbRes := db. + Find(&dbPages) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.PAGE_TABLE, options.QueryArgs, pageAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbPage := range dbPages { + page, err := ToPage(&dbPage) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, page) + } + return result, nil +} + +// GetByID gets pages by id +func (pa *Page) GetByID( + ctx context.Context, + id *uuid.UUID, + options *PageOption, +) (*model.Page, error) { + dbPage := database.Page{} + db := pa.load(options, pa.DB) + result := db. + Where(TableColumnID(database.PAGE_TABLE)+" = ?", id). + First(&dbPage) + + if result.Error != nil { + return nil, result.Error + } + return ToPage(&dbPage) +} + +// GetByCompanyID gets pages by company id +func (pa *Page) GetByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *PageOption, +) (*model.Page, error) { + dbPage := database.Page{} + db := pa.load(options, pa.DB) + result := db. + Where(TableColumn(database.PAGE_TABLE, "company_id")+" = ?", companyID). + First(&dbPage) + + if result.Error != nil { + return nil, result.Error + } + return ToPage(&dbPage) +} + +// GetByNameAndCompanyID gets pages by name +func (pa *Page) GetByNameAndCompanyID( + ctx context.Context, + name *vo.String64, + companyID *uuid.UUID, // can be null + options *PageOption, +) (*model.Page, error) { + page := database.Page{} + db := pa.load(options, pa.DB) + db = withCompanyIncludingNullContext(db, companyID, database.PAGE_TABLE) + result := db. + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.PAGE_TABLE, "name"), + ), + name.String(), + ). + First(&page) + + if result.Error != nil { + return nil, result.Error + } + return ToPage(&page) +} + +// UpdateByID updates a page by id +func (pa *Page) UpdateByID( + ctx context.Context, + id *uuid.UUID, + page *model.Page, +) error { + row := page.ToDBMap() + AddUpdatedAt(row) + res := pa.DB. + Model(&database.Page{}). + Where("id = ?", id). 
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a page by id +func (l *Page) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + result := l.DB.Delete(&database.Page{}, id) + + if result.Error != nil { + return result.Error + } + return nil +} + +func ToPage(row *database.Page) (*model.Page, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + c, err := vo.NewOptionalString1MB(row.Content) + if err != nil { + return nil, errs.Wrap(err) + } + content := nullable.NewNullableWithValue(*c) + + return &model.Page{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Name: name, + Content: content, + }, nil +} diff --git a/backend/repository/recipient.go b/backend/repository/recipient.go new file mode 100644 index 0000000..f89a875 --- /dev/null +++ b/backend/repository/recipient.go @@ -0,0 +1,710 @@ +package repository + +import ( + "context" + "fmt" + "strconv" + "time" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowdCols = []string{ + "created_at", + "updated_at", + "first_name", + "last_name", + "email", + "phone", + "extra_identifier", + "position", + "department", + "city", + "country", + "misc", +} + +// base columns with table prefix +var allowdRecipientColumns = assignTableToColumns(database.RECIPIENT_TABLE, allowdCols) + +// special columns that don't need table prefix +var allowdGetAllColumns = append( + allowdRecipientColumns, + "is_repeat_offender", +) + +var allowdRecipientCampaignEventColumns = utils.MergeStringSlices( + allowedCampaginEventColumns, + []string{ + TableColumn(database.EVENT_TABLE, "name"), + TableColumn(database.CAMPAIGN_TABLE, "name"), + }, +) + +// RecipientOption is options for preloading +type RecipientOption struct { + Fields []string + *vo.QueryArgs + + WithCompany bool + WithGroups bool +} + +// Recipient +type Recipient struct { + DB *gorm.DB + OptionRepository *Option +} + +func (r *Recipient) load(db *gorm.DB, options *RecipientOption) *gorm.DB { + if options.WithCompany { + db = db.Preload("Company") + } + if options.WithGroups { + db = db.Preload("Groups", func(db *gorm.DB) *gorm.DB { + return db.Select("id", "name").Order("name") + }) + } + return db +} + +// GetRepeatOffenderCount gets the repeat offender count +func (r *Recipient) GetRepeatOffenderCount( + ctx context.Context, + companyID *uuid.UUID, +) (int64, error) { + // get configured months from options + opt, err := r.OptionRepository.GetByKey(ctx, data.OptionKeyRepeatOffenderMonths) + if err != nil { + return 0, errs.Wrap(err) + } + months, err := strconv.Atoi(opt.Value.String()) + if err != nil { + return 0, errs.Wrap(err) + } + repeatOffenderTimeThreshold := time.Now().AddDate(0, -months, 0) + + query := fmt.Sprintf(` + SELECT COUNT(*) FROM ( + SELECT %s.id + FROM %s + WHERE EXISTS ( + SELECT 1 + FROM campaign_events ce + JOIN campaigns c ON ce.campaign_id = c.id + WHERE ce.recipient_id = %s.id + AND ce.created_at >= ? 
+ AND c.is_test = false + GROUP BY ce.recipient_id + HAVING COUNT(DISTINCT CASE + WHEN ce.event_id IN (?, ?, ?) THEN ce.campaign_id + WHEN ce.event_id = ? THEN ce.campaign_id + END) > 1 + ) + `, database.RECIPIENT_TABLE, database.RECIPIENT_TABLE, database.RECIPIENT_TABLE) + + if companyID != nil { + query += fmt.Sprintf(" AND (%s.company_id = ? OR %s.company_id IS NULL)", + database.RECIPIENT_TABLE, database.RECIPIENT_TABLE) + query += ") as count_query" + var count int64 + err := r.DB.Raw(query, + repeatOffenderTimeThreshold, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA], + companyID, + ).Scan(&count).Error + return count, err + } + + query += fmt.Sprintf(" AND %s.company_id IS NULL) as count_query", database.RECIPIENT_TABLE) + var count int64 + err = r.DB.Raw(query, + repeatOffenderTimeThreshold, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA], + ).Scan(&count).Error + return count, err +} + +// GetAll gets all recipients +func (r *Recipient) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *RecipientOption, +) (*model.Result[model.RecipientView], error) { + result := model.NewEmptyResult[model.RecipientView]() + db := r.load(r.DB, options) + + // get configured months from options + opt, err := r.OptionRepository.GetByKey(ctx, data.OptionKeyRepeatOffenderMonths) + if err != nil { + return result, errs.Wrap(err) + } + months, err := strconv.Atoi(opt.Value.String()) + if err != nil { + return result, errs.Wrap(err) + } + repeatOffenderTimeThreshold := time.Now().AddDate(0, -months, 0) + + // Create view query with repeat offender computation + query := fmt.Sprintf(` + %s.*, + EXISTS ( + SELECT 1 + FROM campaign_events ce + JOIN campaigns c ON ce.campaign_id = c.id + WHERE ce.recipient_id = %s.id + AND ce.created_at >= ? + AND c.is_test = false + GROUP BY ce.recipient_id + HAVING COUNT(DISTINCT CASE + WHEN ce.event_id IN (?, ?, ?) THEN ce.campaign_id + WHEN ce.event_id = ? THEN ce.campaign_id + END) > 1 + ) as is_repeat_offender + `, database.RECIPIENT_TABLE, database.RECIPIENT_TABLE) + + baseDb := db.Table(database.RECIPIENT_TABLE). 
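+	// is_repeat_offender is true when the recipient has page-visit or data-submission
+	// events in more than one distinct non-test campaign within the configured window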
+ Select(query, + repeatOffenderTimeThreshold, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA], + ) + + // Apply company filter + baseDb = withCompanyIncludingNullContext(baseDb, companyID, database.RECIPIENT_TABLE) + + // Clone the base query for the actual results + db = baseDb.Session(&gorm.Session{}) + + // Apply sorting and pagination + if options.QueryArgs != nil { + if options.QueryArgs.OrderBy == "is_repeat_offender" { + if options.QueryArgs.Desc { + db = db.Order("is_repeat_offender DESC") + baseDb = baseDb.Order("is_repeat_offender DESC") + } else { + db = db.Order("is_repeat_offender ASC") + baseDb = baseDb.Order("is_repeat_offender ASC") + } + } else { + // Use standard query handling for other columns + var err error + db, err = useQuery(db, database.RECIPIENT_TABLE, options.QueryArgs, allowdRecipientColumns...) + if err != nil { + return result, errs.Wrap(err) + } + baseDb, err = useQuery(baseDb, database.RECIPIENT_TABLE, options.QueryArgs, allowdRecipientColumns...) + if err != nil { + return result, errs.Wrap(err) + } + } + + // Apply pagination to main query only + if options.QueryArgs.Limit > 0 { + db = db.Limit(options.QueryArgs.Limit).Offset(options.QueryArgs.Offset) + } + } + + // Execute main query + var dbResults []struct { + database.Recipient + IsRepeatOffender bool `gorm:"column:is_repeat_offender"` + } + + if err := db.Find(&dbResults).Error; err != nil { + return result, errs.Wrap(err) + } + + // Check for next page + if options.QueryArgs != nil && options.QueryArgs.Limit > 0 { + var total int64 + if err := baseDb.Count(&total).Error; err != nil { + return result, errs.Wrap(err) + } + offset64 := int64(options.QueryArgs.Offset) + limit64 := int64(options.QueryArgs.Limit) + result.HasNextPage = total > (offset64 + limit64) + } + + // Convert to view models + for _, dbResult := range dbResults { + recipient, err := ToRecipient(&dbResult.Recipient) + if err != nil { + return result, errs.Wrap(err) + } + + recipientView := model.NewRecipientView(recipient) + recipientView.IsRepeatOffender = dbResult.IsRepeatOffender + + result.Rows = append(result.Rows, recipientView) + } + + return result, nil +} + +// GetAllCampaignEvents gets events by a recipient id +// if campaignID is nil, it retrieves all events +func (r *Recipient) GetAllCampaignEvents( + ctx context.Context, + recipientID *uuid.UUID, + campaignID *uuid.UUID, + queryArgs *vo.QueryArgs, +) (*model.Result[model.RecipientCampaignEvent], error) { + result := model.NewEmptyResult[model.RecipientCampaignEvent]() + db, err := useQuery( + r.DB, + database.CAMPAIGN_EVENT_TABLE, + queryArgs, + allowdRecipientCampaignEventColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + var dbEvents []*database.RecipientCampaignEventView + db = db. + Table(database.CAMPAIGN_EVENT_TABLE). + Select( + TableSelect( + TableColumnAll(database.CAMPAIGN_EVENT_TABLE), + TableColumn(database.EVENT_TABLE, "name"), + TableColumnAlias(database.CAMPAIGN_TABLE, "name", "campaign_name"), + ), + ). 
+ Where( + fmt.Sprintf("%s = ?", TableColumn(database.CAMPAIGN_EVENT_TABLE, "recipient_id")), + recipientID.String(), + ) + if campaignID != nil { + db = db.Where( + fmt.Sprintf("%s = ?", TableColumn(database.CAMPAIGN_EVENT_TABLE, "campaign_id")), + campaignID.String(), + ) + } + res := db.InnerJoins(LeftJoinOn( + database.CAMPAIGN_EVENT_TABLE, + "event_id", + database.EVENT_TABLE, + "id", + )). + InnerJoins(LeftJoinOn( + database.CAMPAIGN_EVENT_TABLE, + "campaign_id", + database.CAMPAIGN_TABLE, + "id", + )). + Find(&dbEvents) + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.CAMPAIGN_EVENT_TABLE, + queryArgs, + allowdRecipientCampaignEventColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, event := range dbEvents { + evt, err := ToRecipientCampaignEvent(event) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, evt) + } + + return result, nil +} + +// GetByID gets a recipient by id +func (r *Recipient) GetByID( + ctx context.Context, + id *uuid.UUID, + options *RecipientOption, +) (*model.Recipient, error) { + db := r.load(r.DB, options) + var dbRecipient database.Recipient + res := db. + Where( + fmt.Sprintf("%s = ?", TableColumnID(database.RECIPIENT_TABLE)), + id, + ). + First(&dbRecipient) + + if res.Error != nil { + return nil, res.Error + } + return ToRecipient(&dbRecipient) +} + +func (r *Recipient) GetStatsByID( + ctx context.Context, + id *uuid.UUID, +) (*model.RecipientCampaignStatsView, error) { + stats := &model.RecipientCampaignStatsView{} + + // get configured months from options + opt, err := r.OptionRepository.GetByKey(ctx, data.OptionKeyRepeatOffenderMonths) + if err != nil { + return nil, errs.Wrap(err) + } + months, err := strconv.Atoi(opt.Value.String()) + if err != nil { + return nil, errs.Wrap(err) + } + repeatOffenderTimeThreshold := time.Now().AddDate(0, -months, 0) + + // get campaign count + r.DB.Model(&database.CampaignRecipient{}). + Joins("JOIN campaigns ON campaigns.id = campaign_recipients.campaign_id"). + Where("campaign_recipients.recipient_id = ? AND campaigns.is_test = ?", id, false). + Distinct("campaign_recipients.campaign_id"). + Count(&stats.CampaignsParticiated) + + // get unique tracking pixels loaded + r.DB.Model(&database.CampaignEvent{}). + Joins("JOIN campaigns ON campaigns.id = campaign_events.campaign_id"). + Where( + "campaign_events.recipient_id = ? AND campaign_events.event_id = ? AND campaigns.is_test = ?", + id, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ], + false, + ). + Distinct("campaign_events.campaign_id"). + Count(&stats.CampaignsTrackingPixelLoaded) + + // get any phishing page loaded distinct by recipient and campaign + r.DB.Model(&database.CampaignEvent{}). + Joins("JOIN campaigns ON campaigns.id = campaign_events.campaign_id"). + Where( + "campaign_events.recipient_id = ? AND campaign_events.event_id IN (?,?,?) AND campaigns.is_test = ?", + id, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED], + false, + ). + Distinct("campaign_events.campaign_id"). + Count(&stats.CampaignsPhishingPageLoaded) + + // get unique submits + r.DB.Model(&database.CampaignEvent{}). + Joins("JOIN campaigns ON campaigns.id = campaign_events.campaign_id"). + Where( + "campaign_events.recipient_id = ? 
AND campaign_events.event_id = ? AND campaigns.is_test = ?", + id, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA], + false, + ). + Distinct("campaign_events.campaign_id"). + Count(&stats.CampaignsDataSubmitted) + + // Get repeat link clicks in last selected threshold months + var linkClickCount int64 + r.DB.Model(&database.CampaignEvent{}). + Joins("JOIN campaigns ON campaigns.id = campaign_events.campaign_id"). + Select("COUNT(DISTINCT campaign_events.campaign_id)"). + Where( + "campaign_events.recipient_id = ? AND campaign_events.event_id IN (?,?,?) AND campaign_events.created_at >= ? AND campaigns.is_test = ?", + id, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED], + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED], + repeatOffenderTimeThreshold, + false, + ). + Scan(&linkClickCount) + + // If they clicked in more than one campaign in the last x months, they're a repeat offender + if linkClickCount > 1 { + stats.RepeatLinkClicks = linkClickCount - 1 // Subtract 1 since we only count repeats + } else { + stats.RepeatLinkClicks = 0 + } + + // Get repeat submissions in last x months + var submitCount int64 + r.DB.Model(&database.CampaignEvent{}). + Joins("JOIN campaigns ON campaigns.id = campaign_events.campaign_id"). + Select("COUNT(DISTINCT campaign_events.campaign_id)"). + Where( + "campaign_events.recipient_id = ? AND campaign_events.event_id = ? AND campaign_events.created_at >= ? AND campaigns.is_test = ?", + id, + cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA], + repeatOffenderTimeThreshold, + false, + ). + Scan(&submitCount) + + // If they submitted in more than one campaign in the last x months, they're a repeat offender + if submitCount > 1 { + stats.RepeatSubmissions = submitCount - 1 // Subtract 1 since we only count repeats + } else { + stats.RepeatSubmissions = 0 + } + + return stats, nil +} + +// GetEmailByID gets a recipient by id +func (r *Recipient) GetEmailByID( + ctx context.Context, + id *uuid.UUID, +) (*vo.Email, error) { + var recipient database.Recipient + res := r.DB. + Select( + TableColumn(database.RECIPIENT_TABLE, "email"), + ). + Where("id = ?", id). + First(&recipient) + + if res.Error != nil { + return nil, res.Error + } + return vo.NewEmailMust(*recipient.Email), nil +} + +// GetAllByCompanyID gets all recipients by company id +func (r *Recipient) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *RecipientOption, +) (*model.Result[model.Recipient], error) { + result := model.NewEmptyResult[model.Recipient]() + db := r.load(r.DB, options) + var dbRecipients []database.Recipient + db = whereCompany(db, database.RECIPIENT_TABLE, companyID) + db, err := useQuery(db, database.RECIPIENT_TABLE, options.QueryArgs, allowdRecipientColumns...) + if err != nil { + return result, errs.Wrap(err) + } + res := db.Find(&dbRecipients) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage(db, database.RECIPIENT_TABLE, options.QueryArgs, allowdRecipientColumns...) 
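+	// useHasNextPage re-runs the same filtered query one row past offset+limit;
+	// any match means another page exists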
+ if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbRecipient := range dbRecipients { + r, err := ToRecipient(&dbRecipient) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, r) + } + return result, nil +} + +func (r *Recipient) GetByEmail( + ctx context.Context, + email *vo.Email, + fields ...string, +) (*model.Recipient, error) { + var dbRecipient database.Recipient + fields = assignTableToColumns(database.RECIPIENT_TABLE, fields) + res := useSelect(r.DB, fields). + Where( + fmt.Sprintf("%s = ?", TableColumn(database.RECIPIENT_TABLE, "email")), + email.String(), + ). + First(&dbRecipient) + if res.Error != nil { + return nil, res.Error + } + return ToRecipient(&dbRecipient) +} + +func (r *Recipient) GetByEmailAndCompanyID( + ctx context.Context, + email *vo.Email, + companyID *uuid.UUID, + fields ...string, +) (*model.Recipient, error) { + var dbRecipient database.Recipient + q := r.DB + if companyID == nil { + q = q.Where( + fmt.Sprintf( + "%s = ? AND %s IS NULL", + TableColumn(database.RECIPIENT_TABLE, "email"), + TableColumn(database.RECIPIENT_TABLE, "company_id"), + ), + email.String(), + ) + } else { + q = q.Where( + fmt.Sprintf( + "%s = ? AND %s = ?", + TableColumn(database.RECIPIENT_TABLE, "email"), + TableColumn(database.RECIPIENT_TABLE, "company_id"), + ), + email.String(), + companyID, + ) + } + fields = assignTableToColumns(database.RECIPIENT_TABLE, fields) + q = useSelect(q, fields) + res := q. + First(&dbRecipient) + + if res.Error != nil { + return nil, res.Error + } + return ToRecipient(&dbRecipient) +} + +// Insert inserts a new recipient +// there is a conflict, were if a user has email a@a.com and another has the phone number 1234 +// if there is a user update by other identifier containing a@a.com and phone number, which one +// should it select? It matches two different identities. This is a conflict. +// a solution could be not allow updating if there is a conflict with two matching targets +// this solution is implemented +func (r *Recipient) Insert( + ctx context.Context, + recp *model.Recipient, +) (*uuid.UUID, error) { + id := uuid.New() + row := recp.ToDBMap() + row["id"] = id + AddTimestamps(row) + res := r.DB.Model(&database.Recipient{}).Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// UpdateByID updates a recipient by id +func (r *Recipient) UpdateByID( + ctx context.Context, + id *uuid.UUID, + recp *model.Recipient, +) error { + row := recp.ToDBMap() + AddUpdatedAt(row) + res := r.DB. + Model(&database.Recipient{}). + Where("id = ?", id). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a recipient by id +func (r *Recipient) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + res := r.DB. + Where("id = ?", id). 
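+		// note: this removes only the recipient row; group membership rows are
+		// not deleted here (see RecipientGroup.RemoveRecipientByIDFromAllGroups)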
+ Delete(&database.Recipient{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +func ToRecipient(row *database.Recipient) (*model.Recipient, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + firstName := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.FirstName), + ) + lastName := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.LastName), + ) + email := nullable.NewNullNullable[vo.Email]() + if row.Email != nil && *row.Email != "" { + email.Set(*vo.NewEmailMust(*row.Email)) + } + phone := nullable.NewNullableWithValue(*vo.NewOptionalString127Must("")) + if row.Phone != nil && *row.Phone != "" { + phone.Set(*vo.NewOptionalString127Must(*row.Phone)) + } + extraIdentifier := nullable.NewNullableWithValue(*vo.NewOptionalString127Must("")) + if row.ExtraIdentifier != nil && *row.ExtraIdentifier != "" { + extraIdentifier.Set(*vo.NewOptionalString127Must(*row.ExtraIdentifier)) + } + position := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.Position), + ) + department := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.Department), + ) + city := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.City), + ) + country := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.Country), + ) + misc := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(row.Misc), + ) + var company *model.Company + if row.Company != nil { + company = ToCompany(row.Company) + } + var groups []*model.RecipientGroup + if row.Groups != nil && len(row.Groups) > 0 { + for _, group := range row.Groups { + g, err := ToRecipientGroup(&group) + if err != nil { + return nil, errs.Wrap(err) + } + groups = append(groups, g) + } + } + + return &model.Recipient{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + FirstName: firstName, + LastName: lastName, + Email: email, + Phone: phone, + ExtraIdentifier: extraIdentifier, + Position: position, + Department: department, + City: city, + Country: country, + Misc: misc, + Company: company, + Groups: nullable.NewNullableWithValue(groups), + }, nil +} diff --git a/backend/repository/recipientGroup.go b/backend/repository/recipientGroup.go new file mode 100644 index 0000000..ee02323 --- /dev/null +++ b/backend/repository/recipientGroup.go @@ -0,0 +1,533 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var RecipientGroupAllowedColumns = assignTableToColumns(database.RECIPIENT_GROUP_TABLE, []string{ + "created_at", + "updated_at", + "name", +}) + +// RecipientGroupOption is a recipient group option +type RecipientGroupOption struct { + *vo.QueryArgs + + WithCompany bool + WithRecipients bool + WithRecipientCount bool +} + +// RecipientGroup is a recipient group repository +type RecipientGroup struct { + DB *gorm.DB +} + +// preload loads relational data +func (rg *RecipientGroup) preload( + options *RecipientGroupOption, + db *gorm.DB, +) *gorm.DB { + if options.WithCompany { + db = db.Preload("Company") + } + if options.WithRecipients { + db = db.Preload("Recipients") + } + return db +} + +// Insert inserts a new recipient 
group +func (rg *RecipientGroup) Insert( + ctx context.Context, + recipientGroup *model.RecipientGroup, +) (*uuid.UUID, error) { + id := uuid.New() + row := recipientGroup.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := rg.DB. + Model(&database.RecipientGroup{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// AddRecipients adds recipients to a recipient group +func (rg *RecipientGroup) AddRecipients( + ctx context.Context, + groupID *uuid.UUID, + recipients []*uuid.UUID, +) error { + for _, recipientID := range recipients { + /* when performing the below optizmie we can handle the whole batch as a single operation... + batch = append(batch, database.RecipientGroupRecipient{ + RecipientID: recipientID, + RecipientGroupID: groupID, + }) + */ + + var c int64 + // check if the recipient already exists, if so, skip + res := rg.DB. + Model(&database.RecipientGroupRecipient{}). + Where("recipient_id = ? AND recipient_group_id = ?", recipientID, groupID). + Count(&c) + + if res.Error != nil { + return res.Error + } + // if already in group, skip it + if c > 0 { + continue + } + res = rg.DB. + Model(&database.RecipientGroupRecipient{}). + Create(&database.RecipientGroupRecipient{ + RecipientID: recipientID, + RecipientGroupID: groupID, + }) + if res.Error != nil { + return res.Error + } + } + + /* TODO OPTIMIZE - This very slow implementation is written like this because it was faster to write + than it is to setup and handle different databases sqlite, mysql, postgres. + Optmize this away by checking which db type we are using and using the correct query such as + IGNORE on mysql and postgres and ON CONFLICT IGNORE on sqlite or something like that.. + */ + // clause.Insert ignores unique constraint violations so they do not get created, but they do not error + // does not work in sqlite + /* + result := db.Clauses(clause.Insert{Modifier: "OR IGNORE"}).Create(&batch) + if result.Error != nil { + return result.Error + } + */ + + return nil +} + +// countRecipients gets the count of recipient groups +func (rg *RecipientGroup) countRecipients( + ctx context.Context, + group *database.RecipientGroup, + options *RecipientGroupOption, +) (int64, error) { + count := model.RECIPIENT_COUNT_NOT_LOADED + if options.WithRecipientCount { + // if recipients is loaded then we can get the count from the slice + if options.WithRecipients { + count = int64(len(group.Recipients)) + } else { + // otherwise we need to query the storage + c, err := rg.GetRecipientCount(ctx, group.ID) + if err != nil { + return count, errs.Wrap(err) + } + count = c + } + } + return count, nil +} + +// GetAll gets all recipient groups with pagination +func (rg *RecipientGroup) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *RecipientGroupOption, +) (*model.Result[model.RecipientGroup], error) { + result := model.NewEmptyResult[model.RecipientGroup]() + db := rg.preload(options, rg.DB) + db = withCompanyIncludingNullContext(db, companyID, database.RECIPIENT_GROUP_TABLE) + db, err := useQuery( + db, + database.RECIPIENT_GROUP_TABLE, + options.QueryArgs, + RecipientGroupAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + + var rows []database.RecipientGroup + dbRes := db.Find(&rows) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.RECIPIENT_GROUP_TABLE, + options.QueryArgs, + RecipientGroupAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + 
result.HasNextPage = hasNextPage + + for _, recipientGroup := range rows { + count, err := rg.countRecipients(ctx, &recipientGroup, options) + if err != nil { + return result, errs.Wrap(err) + } + recipient, err := ToRecipientGroup(&recipientGroup) + if err != nil { + return nil, errs.Wrap(err) + } + + c := nullable.NewNullNullable[int64]() + if count != model.RECIPIENT_COUNT_NOT_LOADED { + c.Set(count) + } + recipient.RecipientCount = c + result.Rows = append(result.Rows, recipient) + + } + + return result, nil +} + +// GetAllByCompanyID gets all recipient groups with pagination by company ID +func (rg *RecipientGroup) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *RecipientGroupOption, +) ([]*model.RecipientGroup, error) { + recipientGroups := []*model.RecipientGroup{} + var rows []database.RecipientGroup + db := rg.preload(options, rg.DB) + db = whereCompany(db, database.RECIPIENT_GROUP_TABLE, companyID) + db, err := useQuery( + db, + database.RECIPIENT_GROUP_TABLE, + options.QueryArgs, + RecipientGroupAllowedColumns..., + ) + if err != nil { + return recipientGroups, errs.Wrap(err) + } + result := db.Find(&rows) + + if result.Error != nil { + return []*model.RecipientGroup{}, result.Error + } + for _, recipientGroup := range rows { + count, err := rg.countRecipients(ctx, &recipientGroup, options) + if err != nil { + return recipientGroups, errs.Wrap(err) + } + recipient, err := ToRecipientGroup(&recipientGroup) + if err != nil { + return nil, errs.Wrap(err) + } + + c := nullable.NewNullNullable[int64]() + if count != model.RECIPIENT_COUNT_NOT_LOADED { + c.Set(count) + } + recipient.RecipientCount = c + recipientGroups = append(recipientGroups, recipient) + + } + return recipientGroups, nil +} + +// GetRecipientCount gets the recipient count of a recipient group +func (rg *RecipientGroup) GetRecipientCount( + ctx context.Context, + groupID *uuid.UUID, +) (int64, error) { + var count int64 + result := rg.DB. + Model(&database.RecipientGroupRecipient{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.RECIPIENT_GROUP_RECIPIENT_TABLE, "recipient_group_id"), + ), + groupID.String(), + ). + Count(&count) + if result.Error != nil { + return 0, result.Error + } + return count, nil +} + +// GetByID gets a recipient group by id +func (rg *RecipientGroup) GetByID( + ctx context.Context, + id *uuid.UUID, + options *RecipientGroupOption, +) (*model.RecipientGroup, error) { + var rows database.RecipientGroup + db := rg.preload(options, rg.DB) + result := db. + Where( + fmt.Sprintf( + "%s = ?", + TableColumnID(database.RECIPIENT_GROUP_TABLE), + ), + id.String(), + ). 
+ First(&rows) + + if result.Error != nil { + return nil, result.Error + } + count, err := rg.countRecipients( + ctx, + &rows, + options, + ) + if err != nil { + return nil, errs.Wrap(err) + } + recipientGroup, err := ToRecipientGroup(&rows) + if err != nil { + return nil, errs.Wrap(err) + } + c := nullable.NewNullNullable[int64]() + if count != model.RECIPIENT_COUNT_NOT_LOADED { + c.Set(count) + } + recipientGroup.RecipientCount = c + return recipientGroup, nil +} + +// GetByNameAndCompanyID gets a recipient group by name +func (rg *RecipientGroup) GetByNameAndCompanyID( + ctx context.Context, + name string, + companyID *uuid.UUID, + options *RecipientGroupOption, +) (*model.RecipientGroup, error) { + var recipientGroup database.RecipientGroup + db := rg.preload(options, rg.DB) + whereCompany := fmt.Sprintf( + "%s IS NULL", + TableColumn(database.RECIPIENT_GROUP_TABLE, "company_id"), + ) + if companyID != nil { + whereCompany = fmt.Sprintf( + "%s = ?", + TableColumn(database.RECIPIENT_GROUP_TABLE, "company_id"), + ) + } + result := db. + Where( + fmt.Sprintf( + "%s = ? AND %s", + TableColumnName(database.RECIPIENT_GROUP_TABLE), + whereCompany, + ), + name, + companyID, + ). + First(&recipientGroup) + + if result.Error != nil { + return nil, result.Error + } + count, err := rg.countRecipients( + ctx, + &recipientGroup, + options, + ) + if err != nil { + return nil, errs.Wrap(err) + } + recpGroup, err := ToRecipientGroup(&recipientGroup) + if err != nil { + return nil, errs.Wrap(err) + } + c := nullable.NewNullNullable[int64]() + if count != model.RECIPIENT_COUNT_NOT_LOADED { + c.Set(count) + } + recpGroup.RecipientCount = c + return recpGroup, nil +} + +// GetRecipientsByGroupID gets recipients by recipient group id +func (rg *RecipientGroup) GetRecipientsByGroupID( + ctx context.Context, + id *uuid.UUID, + options *RecipientOption, +) (*model.Result[model.Recipient], error) { + result := model.NewEmptyResult[model.Recipient]() + db := rg.DB + var recipients []database.Recipient + if options.WithCompany { + db = db.Preload("Company") + } + db, err := useQuery(db, database.RECIPIENT_TABLE, options.QueryArgs, allowdRecipientColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db. + Model(&database.Recipient{}). + Joins("JOIN recipient_group_recipients ON recipient_group_recipients.recipient_id = recipients.id"). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.RECIPIENT_GROUP_RECIPIENT_TABLE, "recipient_group_id"), + ), + id.String(), + ). + Find(&recipients) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage( + db, database.RECIPIENT_TABLE, options.QueryArgs, allowdRecipientColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, recipient := range recipients { + r, err := ToRecipient(&recipient) + if err != nil { + return nil, errs.Wrap(err) + } + result.Rows = append(result.Rows, r) + } + return result, nil +} + +// UpdateByID updates a recipient group by id +func (rg *RecipientGroup) UpdateByID( + ctx context.Context, + id *uuid.UUID, + recipientGroup *model.RecipientGroup, +) error { + row := recipientGroup.ToDBMap() + AddUpdatedAt(row) + res := rg.DB. + Model(&database.RecipientGroup{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumnID(database.RECIPIENT_GROUP_TABLE), + ), + id.String(), + ). 
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveRecipientByIDFromAllGroups removes a recipient from all recipient groups +func (rg *RecipientGroup) RemoveRecipientByIDFromAllGroups( + ctx context.Context, + recipientID *uuid.UUID, +) error { + result := rg.DB. + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.RECIPIENT_GROUP_RECIPIENT_TABLE, "recipient_id"), + ), + recipientID.String(), + ). + Delete(&database.RecipientGroupRecipient{}) + + if result.Error != nil { + return result.Error + } + return nil +} + +// RemoveRecipients removes a recipient from a recipient group +func (rg *RecipientGroup) RemoveRecipients( + ctx context.Context, + groupID *uuid.UUID, + recipientIDs []*uuid.UUID, +) error { + result := rg.DB. + Where("recipient_group_id = ? AND recipient_id IN ?", groupID, recipientIDs). + Delete(&database.RecipientGroupRecipient{}) + + if result.Error != nil { + return result.Error + } + return nil +} + +// DeleteByID deletes a recipient group by id +func (rg *RecipientGroup) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + // delete recipients + res := rg.DB. + Where("recipient_group_id = ?", id). + Delete(&database.RecipientGroupRecipient{}) + + if res.Error != nil { + return res.Error + } + // delete recipient group + res = rg.DB. + Where("id = ?", id). + Delete(&database.RecipientGroup{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +func ToRecipientGroup(row *database.RecipientGroup) (*model.RecipientGroup, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString127Must(row.Name)) + recipients := []*model.Recipient{} + if len(row.Recipients) > 0 { + for _, recipient := range row.Recipients { + r, err := ToRecipient(&recipient) + if err != nil { + return nil, errs.Wrap(err) + } + recipients = append(recipients, r) + } + } + + return &model.RecipientGroup{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + Name: name, + CompanyID: companyID, + Recipients: recipients, + RecipientCount: nullable.NewNullNullable[int64](), + }, nil +} diff --git a/backend/repository/role.go b/backend/repository/role.go new file mode 100644 index 0000000..fe53a4e --- /dev/null +++ b/backend/repository/role.go @@ -0,0 +1,72 @@ +package repository + +import ( + "context" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "gorm.io/gorm" +) + +// Role is the Role repository +type Role struct { + DB *gorm.DB +} + +// GetByName gets a role by name +func (r *Role) GetByName( + ctx context.Context, + name string, +) (*model.Role, error) { + var dbRole database.Role + result := r.DB.Where("name = ?", name).First(&dbRole) + + if result.Error != nil { + return nil, result.Error + } + return ToRole(&dbRole), nil +} + +// GetByID gets a role by id +func (r *Role) GetByID( + ctx context.Context, + id *uuid.UUID, +) (*model.Role, error) { + var dbRole database.Role + result := r.DB. + Where("id = ?", id.String()). + First(&dbRole) + + if result.Error != nil { + return nil, result.Error + } + return ToRole(&dbRole), nil +} + +// insert saves a new role +// Insert saves a new user role +func (r *Role) Insert( + ctx context.Context, + role *model.Role, +) (*uuid.UUID, error) { + id := uuid.New() + row := role.ToDBMap() + row["id"] = id + + res := r.DB. 
+ Model(&database.Role{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +func ToRole(row *database.Role) *model.Role { + return &model.Role{ + ID: *row.ID, + Name: row.Name, + } +} diff --git a/backend/repository/session.go b/backend/repository/session.go new file mode 100644 index 0000000..daf7572 --- /dev/null +++ b/backend/repository/session.go @@ -0,0 +1,211 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowedSessionColumns = assignTableToColumns(database.SESSION_TABLE, []string{ + "created_at", + "updated_at", + "ip_address", +}) + +// SessionOption is a session option +type SessionOption struct { + *vo.QueryArgs + + WithUser bool + WithUserRole bool + WithUserCompany bool +} + +// Session is a repository for Session +type Session struct { + DB *gorm.DB +} + +// / preload preloads the user ... with the role and company to a user? +func (r *Session) with(option *SessionOption, db *gorm.DB) *gorm.DB { + if option.WithUser { + db := db.Preload("User") + if option.WithUserRole { + db = db.Preload("User.Role") + } + if option.WithUserCompany { + db = db.Preload("User.Company") + } + return db + } + return db +} + +// Insert creates a new session +func (r *Session) Insert( + ctx context.Context, + session *model.Session, +) (*uuid.UUID, error) { + id := uuid.New() + row := map[string]interface{}{ + "id": id.String(), + "expires_at": utils.RFC3339UTC(*session.ExpiresAt), + "max_age_at": utils.RFC3339UTC(*session.MaxAgeAt), + "ip_address": session.IP, + "user_id": session.User.ID.MustGet().String(), + } + AddTimestamps(row) + result := r.DB.Model(&database.Session{}). + Create(row) + + if result.Error != nil { + return nil, result.Error + } + return &id, nil + +} + +// GetByID gets a session +func (r *Session) GetByID( + ctx context.Context, + id *uuid.UUID, + options *SessionOption, +) (*model.Session, error) { + var dbSession database.Session + // get session by id which is not expired or older than max age + now := utils.NowRFC3339UTC() + db := r.with(options, r.DB) + result := db.First( + &dbSession, + fmt.Sprintf( + "%s = ? AND %s > ? AND %s > ?", + TableColumnID(database.SESSION_TABLE), + TableColumn(database.SESSION_TABLE, "expires_at"), + TableColumn(database.SESSION_TABLE, "max_age_at"), + ), + id.String(), + now, + now, + ) + if result.Error != nil { + return nil, result.Error + } + return ToSession(&dbSession) +} + +// GetAllActiveSessionByUserID gets all sessions by user ID +func (r *Session) GetAllActiveSessionByUserID( + ctx context.Context, + userID *uuid.UUID, + options *SessionOption, +) (*model.Result[model.Session], error) { + result := model.NewEmptyResult[model.Session]() + var dbSessions []database.Session + now := utils.NowRFC3339UTC() + db := r.with(options, r.DB) + db, err := useQuery(db, database.SESSION_TABLE, options.QueryArgs, allowedSessionColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db.Find( + &dbSessions, + fmt.Sprintf( + "%s = ? AND (expires_at > ? 
OR %s > ?)", + TableColumn(database.SESSION_TABLE, "user_id"), + TableColumn(database.SESSION_TABLE, "max_age_at"), + ), + userID.String(), + now, + now, + ) + + if dbRes.Error != nil { + return result, dbRes.Error + } + hasNextPage, err := useHasNextPage( + db, database.SESSION_TABLE, options.QueryArgs, allowedSessionColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbSession := range dbSessions { + session, err := ToSession(&dbSession) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, session) + } + return result, nil +} + +// UpdateExpiry updates a session +func (r *Session) UpdateExpiry( + ctx context.Context, + session *model.Session, +) error { + row := map[string]any{ + "expires_at": utils.RFC3339UTC(*session.ExpiresAt), + } + AddUpdatedAt(row) + result := r.DB. + Model(&database.Session{}). + Where("id = ?", session.ID.String()). + Updates(row) + + if result.Error != nil { + return result.Error + } + return nil +} + +// Expire expires a session +func (r *Session) Expire( + ctx context.Context, + sessionID *uuid.UUID, +) error { + now := utils.NowRFC3339UTC() + row := map[string]any{ + "expires_at": now, + "max_age_at": now, + } + AddUpdatedAt(row) + // update both expires_at and max_age_at to now + result := r.DB. + Model(&database.Session{}). + Where("id = ?", sessionID.String()). + Updates(row) + + if result.Error != nil { + return result.Error + } + return nil +} + +func ToSession(row *database.Session) (*model.Session, error) { + var user *model.User + if row.User != nil { + u, err := ToUser(row.User) + if err != nil { + return nil, errs.Wrap(err) + } + user = u + } + return &model.Session{ + ID: row.ID, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + ExpiresAt: row.ExpiresAt, + MaxAgeAt: row.MaxAgeAt, + IP: row.IPAddress, + User: user, + }, nil +} diff --git a/backend/repository/smtpConfiguration.go b/backend/repository/smtpConfiguration.go new file mode 100644 index 0000000..562e658 --- /dev/null +++ b/backend/repository/smtpConfiguration.go @@ -0,0 +1,300 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var allowedSMTPConfigurationsSortBy = assignTableToColumns(database.SMTP_CONFIGURATION_TABLE, []string{ + "created_at", + "updated_at", + "name", + "host", + "port", + "username", +}) + +// SMTPConfigurationOption is options for preloading +type SMTPConfigurationOption struct { + *vo.QueryArgs + + WithCompany bool + WithHeaders bool +} + +// SMTPConfiguration is a SMTP configuration repository +type SMTPConfiguration struct { + DB *gorm.DB +} + +// preload applies the preloading options +func (r SMTPConfiguration) preload(o *SMTPConfigurationOption, db *gorm.DB) *gorm.DB { + if o == nil { + return db + } + if o.WithCompany { + db = db.Preload("Company") + } + if o.WithHeaders { + db = db.Preload("Headers", func(db *gorm.DB) *gorm.DB { + return db.Order("smtp_headers.key ASC") + }) + } + return db +} + +// Insert inserts a new SMTP configuration +func (r *SMTPConfiguration) Insert( + ctx context.Context, + conf *model.SMTPConfiguration, +) (*uuid.UUID, error) { + id := uuid.New() + row := conf.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := r.DB. 
+ Model(&database.SMTPConfiguration{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetByID gets a SMTP configuration by ID +func (r *SMTPConfiguration) GetByID( + ctx context.Context, + id *uuid.UUID, + option *SMTPConfigurationOption, +) (*model.SMTPConfiguration, error) { + db := r.preload(option, r.DB) + dbSMTP := &database.SMTPConfiguration{} + + res := db. + Where( + fmt.Sprintf( + "%s = ?", + TableColumnID(database.SMTP_CONFIGURATION_TABLE), + ), + id.String(), + ). + First(&dbSMTP) + + if res.Error != nil { + return nil, res.Error + } + return ToSMTPConfiguration(dbSMTP), nil +} + +// GetAllByCompanyID gets SMTP configurations by company ID +func (r *SMTPConfiguration) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *SMTPConfigurationOption, +) (*model.Result[model.SMTPConfiguration], error) { + result := model.NewEmptyResult[model.SMTPConfiguration]() + db := r.preload(options, r.DB) + db = withCompanyIncludingNullContext(db, companyID, database.SMTP_CONFIGURATION_TABLE) + db, err := useQuery( + db, + database.SMTP_CONFIGURATION_TABLE, + options.QueryArgs, + allowedSMTPConfigurationsSortBy..., + ) + if err != nil { + return result, errs.Wrap(err) + } + dbSMTPs := []database.SMTPConfiguration{} + res := db.Find(&dbSMTPs) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage( + db, + database.SMTP_CONFIGURATION_TABLE, + options.QueryArgs, + allowedSMTPConfigurationsSortBy..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbSMTP := range dbSMTPs { + result.Rows = append(result.Rows, ToSMTPConfiguration(&dbSMTP)) + } + return result, nil +} + +// GetByNameAndCompanyID gets a SMTP configuration by name +func (r *SMTPConfiguration) GetByNameAndCompanyID( + ctx context.Context, + name *vo.String127, + companyID *uuid.UUID, // can be null + option *SMTPConfigurationOption, +) (*model.SMTPConfiguration, error) { + db := r.preload(option, r.DB) + dbSMTP := &database.SMTPConfiguration{} + whereCompany := fmt.Sprintf( + "%s IS NULL", + TableColumn(database.SMTP_CONFIGURATION_TABLE, "company_id"), + ) + if companyID != nil { + whereCompany = fmt.Sprintf( + "%s = ?", + TableColumn(database.SMTP_CONFIGURATION_TABLE, "company_id"), + ) + } + res := db. + Where( + fmt.Sprintf( + "%s = ? AND %s", + TableColumnName(database.SMTP_CONFIGURATION_TABLE), + whereCompany, + ), + name.String(), + companyID, + ). + First(&dbSMTP) + + if res.Error != nil { + return nil, res.Error + } + return ToSMTPConfiguration(dbSMTP), nil +} + +// UpdateByID updates a SMTP configuration by ID +func (r *SMTPConfiguration) UpdateByID( + ctx context.Context, + id *uuid.UUID, + conf *model.SMTPConfiguration, +) error { + row := conf.ToDBMap() + AddUpdatedAt(row) + res := r.DB. + Model(&database.SMTPConfiguration{}). + Where("id = ?", id). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// AddHeader adds a header to a SMTP configuration +func (r *SMTPConfiguration) AddHeader( + ctx context.Context, + header *model.SMTPHeader, +) (*uuid.UUID, error) { + id := uuid.New() + updateMap := header.ToDBMap() + updateMap["id"] = id + AddTimestamps(updateMap) + + res := r.DB. + Model(&database.SMTPHeader{}). 
+ Create(updateMap) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// RemoveHeader removes a header from a SMTP configuration +func (r *SMTPConfiguration) RemoveHeader( + ctx context.Context, + headerID *uuid.UUID, +) error { + res := r.DB. + Where("id = ?", headerID). + Delete(&database.SMTPHeader{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a SMTP configuration by ID +// including all headers attached to it +func (r *SMTPConfiguration) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + // delete headers + res := r.DB. + Where("smtp_configuration_id = ?", id). + Delete(&database.SMTPHeader{}) + + if res.Error != nil { + return res.Error + } + // delete smtp + res = r.DB. + Where("id = ?", id). + Delete(&database.SMTPConfiguration{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +func ToSMTPConfiguration( + row *database.SMTPConfiguration, +) *model.SMTPConfiguration { + headers := []*model.SMTPHeader{} + for _, header := range row.Headers { + k := vo.NewString127Must(header.Key) + key := nullable.NewNullableWithValue(*k) + v := vo.NewString255Must(header.Value) + value := nullable.NewNullableWithValue(*v) + headers = append(headers, &model.SMTPHeader{ + ID: *header.ID, + CreatedAt: header.CreatedAt, + UpdatedAt: header.UpdatedAt, + Key: key, + Value: value, + SmtpID: nullable.NewNullableWithValue(*header.SMTPConfigurationID), + }) + } + id := nullable.NewNullableWithValue(row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString127Must(row.Name)) + host := nullable.NewNullableWithValue(*vo.NewString255Must(row.Host)) + port := nullable.NewNullableWithValue(*vo.NewPortMust(row.Port)) + username := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(row.Username)) + password := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(row.Password)) + ignoreCertErrors := nullable.NewNullableWithValue(row.IgnoreCertErrors) + + return &model.SMTPConfiguration{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Name: name, + Host: host, + Port: port, + Username: username, + Password: password, + IgnoreCertErrors: ignoreCertErrors, + Headers: headers, + } +} diff --git a/backend/repository/user.go b/backend/repository/user.go new file mode 100644 index 0000000..38d4a74 --- /dev/null +++ b/backend/repository/user.go @@ -0,0 +1,759 @@ +package repository + +import ( + "context" + "errors" + "fmt" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var sessionAllowedColumns = assignTableToColumns(database.SESSION_TABLE, []string{ + "created_at", + "updated_at", + "ip_address", +}) + +var userAllowedColumns = assignTableToColumns(database.USER_TABLE, []string{ + "created_at", + "updated_at", + "name", + "username", + "email", +}) + +// UserOption is a user option +type UserOption struct { + *vo.QueryArgs + + WithRole bool + WithCompany bool +} + +// User is a repository for User +type User struct { + DB *gorm.DB +} + +// / with preloads the role and company to a user +func (r *User) with(options *UserOption, db *gorm.DB) *gorm.DB { + if options.WithRole { + db = db.Preload("Role") + } + if 
options.WithCompany { + db = db.Preload("Company") + } + return db +} + +// SetupTOTP adds TOTP to a user +// adds the url and secret, but does not enable TOTP +func (r *User) SetupTOTP( + ctx context.Context, + userID *uuid.UUID, + secret string, + recoveryCodes string, + url string, +) error { + row := + map[string]any{ + "totp_enabled": false, + "totp_secret": secret, + "totp_recovery_code": recoveryCodes, + "totp_auth_url": url, + } + AddUpdatedAt(row) + result := r.DB. + Model(&database.User{}). + Where("id = ?", userID.String()). + Updates(row) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// EnableTOTP enables TOTP for a user +func (r *User) EnableTOTP( + ctx context.Context, + userID *uuid.UUID, +) error { + row := map[string]any{ + "totp_enabled": true, + } + AddUpdatedAt(row) + result := r.DB. + Model(&database.User{}). + Where("id = ?", userID.String()). + Updates(row) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// RemoveTOTP removes TOTP from a user +func (r *User) RemoveTOTP( + ctx context.Context, + userID *uuid.UUID, +) error { + row := map[string]any{ + "totp_enabled": false, + "totp_secret": "", + "totp_auth_url": "", + "totp_recovery_code": "", + } + AddUpdatedAt(row) + result := r.DB. + Model(&database.User{}). + Where("id = ?", userID.String()). + Updates(row) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// GetTOTP gets TOTP from a user +func (r *User) GetTOTP( + ctx context.Context, + userID *uuid.UUID, +) (string, string, error) { + dbUser := &database.User{} + result := r.DB. + Select("totp_secret", "totp_auth_url"). + Where("id = ?", userID.String()). + First(&dbUser) + + if result.Error != nil { + return "", "", result.Error + } + return dbUser.TOTPSecret, dbUser.TOTPAuthURL, nil +} + +// GetMFARecoveryCode gets the TOTP secret for a user +func (r *User) GetMFARecoveryCode( + ctx context.Context, + userID *uuid.UUID, +) (string, error) { + dbUser := &database.User{} + result := r.DB. + Select("totp_recovery_code"). + Where("id = ?", userID.String()). + First(&dbUser) + + if result.Error != nil { + return "", result.Error + } + return dbUser.TOTPRecoveryCode, nil +} + +// IsTOTPEnabled checks if TOTP is enabled for a user +func (r *User) IsTOTPEnabled( + ctx context.Context, + userID *uuid.UUID, +) (bool, error) { + dbUser := &database.User{} + result := r.DB. + Select("totp_enabled"). + Where("id = ?", userID.String()). + First(&dbUser) + + if result.Error != nil { + return false, result.Error + } + return dbUser.TOTPEnabled, nil +} + +// Insert creates a new user +func (r *User) Insert( + ctx context.Context, + user *model.User, + passwordHash string, + ssoID string, +) (*uuid.UUID, error) { + id := uuid.New() + row := user.ToDBMap() + row["id"] = id + AddTimestamps(row) + row["password_hash"] = passwordHash + row["sso_id"] = ssoID + + res := r.DB. + Model(&database.User{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// UpdateByID updates a user by id +func (r *User) UpdateByID( + ctx context.Context, + id *uuid.UUID, + user *model.User, +) error { + row := user.ToDBMap() + AddUpdatedAt(row) + res := r.DB. + Model(&database.User{}). + Where("id = ?", id.String()). 
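+		// Updates with a map (from ToDBMap) also persists zero values,
+		// which gorm would skip if a struct were passed instead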
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// UpsertAPIKey upserts api key +func (r *User) UpsertAPIKey( + ctx context.Context, + id *uuid.UUID, + key string, +) error { + row := map[string]any{} + AddUpdatedAt(row) + row["api_key"] = key + res := r.DB. + Model(&database.User{}). + Where("id = ?", id.String()). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// RemoveAPIKey deletes a api key +func (r *User) RemoveAPIKey( + ctx context.Context, + id *uuid.UUID, +) error { + row := map[string]any{} + AddUpdatedAt(row) + row["api_key"] = "" + res := r.DB. + Model(&database.User{}). + Where("id = ?", id.String()). + Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// GetAPIKey gets the users api key +func (r *User) GetAPIKey( + ctx context.Context, + id *uuid.UUID, +) (string, error) { + dbUser := &database.User{} + result := r.DB. + Select("api_key"). + Where("id = ?", id.String()). + First(&dbUser) + + if result.Error != nil { + return "", result.Error + } + return dbUser.APIKey, nil +} + +// GetAllAPIKeys gets alll the users api keys +// return map[apiKey]userID +func (r *User) GetAllAPIKeys( + ctx context.Context, +) (map[string]*uuid.UUID, error) { + apiKeys := map[string]*uuid.UUID{} + dbUsers := []database.User{} + result := r.DB. + Select("id, api_key"). + First(&dbUsers) + + if result.Error != nil { + return apiKeys, result.Error + } + for _, dbUser := range dbUsers { + apiKeys[dbUser.APIKey] = dbUser.ID + } + return apiKeys, nil +} + +// DeleteByID deletes a user by id +func (r *User) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + // anonymize user + // anon := uuid.New() + newName := fmt.Sprintf( + "deleted-%s", + uuid.New().String(), + ) + res := r.DB. + Table(database.USER_TABLE). + Where("id = ?", id.String()). + Updates(map[string]any{ + "name": newName, + "username": newName, + "email": fmt.Sprintf("%s@deleted.deleteduser", newName), + "password_hash": "", + "totp_enabled": false, + "totp_secret": nil, + "totp_auth_url": nil, + "totp_recovery_code": nil, + "sso_id": "", + }) + + if res.Error != nil { + return res.Error + } + + res = r.DB. + Where("id = ?", id.String()). + Delete(&database.User{}) + + if res.Error != nil { + return res.Error + } + return nil +} + +// GetAll gets all users +func (r *User) GetAll( + ctx context.Context, + options *UserOption, +) (*model.Result[model.User], error) { + result := model.NewEmptyResult[model.User]() + dbUsers := []database.User{} + + db, err := useQuery(r.DB, database.USER_TABLE, options.QueryArgs, userAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := r.with(options, db). + Find(&dbUsers) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage( + db, database.USER_TABLE, options.QueryArgs, userAllowedColumns..., + ) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbUsers := range dbUsers { + usr, err := ToUser(&dbUsers) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, usr) + } + return result, nil +} + +// GetByID gets a user by id, includding the role and company +func (r *User) GetByID( + ctx context.Context, + id *uuid.UUID, + options *UserOption, +) (*model.User, error) { + if id == nil { + return nil, errs.Wrap(errors.New("ID is nil")) + } + dbUser := &database.User{} + result := r.with(options, r.DB). 
+ Where( + fmt.Sprintf( + "%s = ?", + TableColumnID(database.USER_TABLE), + ), + id.String(), + ). + First(&dbUser) + + if result.Error != nil { + return nil, result.Error + } + return ToUser(dbUser) +} + +// GetByUsername gets a user by username +func (r *User) GetByUsername( + ctx context.Context, + username *vo.Username, + options *UserOption, +) (*model.User, error) { + dbUser := &database.User{} + result := r.with(options, r.DB). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.USER_TABLE, "username"), + ), + username.String(), + ). + First(&dbUser) + + if result.Error != nil { + return nil, result.Error + } + return ToUser(dbUser) +} + +// GetByEmail gets a user by email +func (r *User) GetByEmail( + ctx context.Context, + email *vo.Email, + options *UserOption, +) (*model.User, error) { + dbUser := &database.User{} + result := r.with(options, r.DB). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.USER_TABLE, "email"), + ), + email.String(), + ). + First(&dbUser) + + if result.Error != nil { + return nil, result.Error + } + return ToUser(dbUser) +} + +func (r *User) GetPasswordHashByUsername( + ctx context.Context, + username *vo.Username, +) (string, error) { + dbUser := &database.User{} + result := r.DB. + Select("password_hash"). + Where("username = ?", username.String()). + First(&dbUser) + + if result.Error != nil { + return "", result.Error + } + return dbUser.PasswordHash, nil +} + +// GetBySessionID gets a user by session id +// this does not validate the session +func (r *User) GetBySessionID( + ctx context.Context, + sessionID *uuid.UUID, + options *UserOption, +) (*model.User, error) { + dbUser := &database.User{} + db, err := useQuery(r.DB, database.SESSION_TABLE, options.QueryArgs, allowedSessionColumns...) + + if err != nil { + return nil, errs.Wrap(err) + } + result := r.with(options, db). + Joins("JOIN sessions ON sessions.user_id = users.id"). + Where("sessions.id = ?", sessionID.String()). + First(&dbUser) + + if result.Error != nil { + return nil, result.Error + } + return ToUser(dbUser) + +} + +// updateUsernameByID updates the username by id +func (r *User) updateUsernameByID( + tx *gorm.DB, + id *uuid.UUID, + username *vo.Username, +) error { + row := map[string]any{ + "username": username.String(), + } + AddUpdatedAt(row) + result := tx. + Model(&database.User{}). + Where("id = ?", id.String()). + Updates(row) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// UpdateUserToSSO removes the password hash and sets a sso id +func (r *User) UpdateUserToSSO( + ctx context.Context, + id *uuid.UUID, + ssoID string, +) error { + result := r.DB. + Table(database.USER_TABLE). + Where("id = ?", id.String()). + Updates(map[string]interface{}{ + "password_hash": "", + "sso_id": ssoID, + }) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + + return nil +} + +// UpdateUserToNoSSO removes the SSO id +// f +func (r *User) UpdateUserToNoSSO( + ctx context.Context, + id *uuid.UUID, +) error { + result := r.DB. + Table(database.USER_TABLE). + Where("id = ?", id.String()). 
+ Updates(map[string]interface{}{ + "sso_id": "", + }) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + + return nil +} + +// UpdateUsernameByID updates the username by id +func (r *User) UpdateUsernameByID( + ctx context.Context, + id *uuid.UUID, + username *vo.Username, +) error { + return r.updateUsernameByID(r.DB, id, username) +} + +// UpdateUsernameByIDWithTransaction updates the username by id +func (r *User) UpdateUsernameByIDWithTransaction( + ctx context.Context, + tx *gorm.DB, + id *uuid.UUID, + username *vo.Username, +) error { + return r.updateUsernameByID(tx, id, username) +} + +// updateFullNameByID updates the full name by id +func (r *User) updateFullNameByID( + tx *gorm.DB, + id *uuid.UUID, + name *vo.UserFullname, +) error { + row := map[string]any{ + "name": name.String(), + } + AddUpdatedAt(row) + result := tx. + Model(&database.User{}). + Where("id = ?", id.String()). + Updates(row) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// UpdateFullNameByID updates the full name by id +func (r *User) UpdateFullNameByID( + ctx context.Context, + id *uuid.UUID, + name *vo.UserFullname, +) error { + return r.updateFullNameByID(r.DB, id, name) +} + +// UpdateFullNameByIDWithTransaction updates the full name by id +func (r *User) UpdateFullNameByIDWithTransaction( + ctx context.Context, + tx *gorm.DB, + id *uuid.UUID, + name *vo.UserFullname, +) error { + return r.updateFullNameByID(tx, id, name) +} + +// updateEmailByID updates the email by id +func (r *User) updateEmailByID( + tx *gorm.DB, + id *uuid.UUID, + email *vo.Email, +) error { + row := map[string]any{ + "email": email.String(), + } + AddUpdatedAt(row) + result := tx. + Model(&database.User{}). + Where("id = ?", id.String()). + Updates(row) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// UpdateEmailByID updates the email by id +func (r *User) UpdateEmailByID( + ctx context.Context, + id *uuid.UUID, + email *vo.Email, +) error { + return r.updateEmailByID(r.DB, id, email) +} + +// UpdateEmailByIDWithTransaction updates the email by id +func (r *User) UpdateEmailByIDWithTransaction( + ctx context.Context, + tx *gorm.DB, + id *uuid.UUID, + email *vo.Email, +) error { + return r.updateEmailByID(tx, id, email) +} + +// updatePasswordHashByID updates the password hash by id +func (r *User) updatePasswordHashByID( + tx *gorm.DB, + id *uuid.UUID, + passwordHash string, +) error { + row := map[string]interface{}{ + "password_hash": passwordHash, + } + AddUpdatedAt(row) + result := tx. + Model(&database.User{}). + Where("id = ?", id.String()). 
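+		// unlike updatePasswordHashByUsername below, this variant does not
+		// reset require_password_renew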
+ Updates(row) + + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// UpdatePasswordHashByID updates the password hash by id +func (r *User) UpdatePasswordHashByID( + ctx context.Context, + id *uuid.UUID, + passwordHash string, +) error { + return r.updatePasswordHashByID(r.DB, id, passwordHash) +} + +// UpdatePasswordHashByIDWithTransaction updates the password hash by id +func (r *User) UpdatePasswordHashByIDWithTransaction( + ctx context.Context, + tx *gorm.DB, + id *uuid.UUID, + passwordHash string, +) error { + return r.updatePasswordHashByID(tx, id, passwordHash) +} + +// updatePasswordHashByID updates the password hash by id +func (r *User) updatePasswordHashByUsername( + tx *gorm.DB, + username *vo.Username, + passwordHash string, +) error { + row := map[string]interface{}{ + "password_hash": passwordHash, + "require_password_renew": false, + } + AddUpdatedAt(row) + result := tx. + Model(&database.User{}). + Where("username = ?", username.String()). + Updates(row) + if result.Error != nil { + return errs.Wrap(result.Error) + } + return nil +} + +// UpdatePasswordHashByUsername updates the password hash by id +func (r *User) UpdatePasswordHashByUsername( + ctx context.Context, + username *vo.Username, + passwordHash string, +) error { + return r.updatePasswordHashByUsername(r.DB, username, passwordHash) +} + +// UpdatePasswordHashByUsernameWithTransaction updates the password hash by id +func (r *User) UpdatePasswordHashByUsernameWithTransaction( + ctx context.Context, + tx *gorm.DB, + username *vo.Username, + passwordHash string, +) error { + return r.updatePasswordHashByUsername(tx, username, passwordHash) +} + +func ToUser(row *database.User) (*model.User, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + roleID := nullable.NewNullableWithValue(*row.RoleID) + userFullname := nullable.NewNullableWithValue(*vo.NewUserFullnameMust(row.Name)) + username := nullable.NewNullableWithValue(*vo.NewUsernameMust(row.Username)) + email := nullable.NewNullableWithValue(*vo.NewEmailMust(row.Email)) + ssoID := nullable.NewNullableWithValue(row.SSOID) + var role *model.Role + if row.Role != nil { + role = ToRole(row.Role) + } + var company *model.Company + if row.Company != nil { + company = ToCompany(row.Company) + } + + return &model.User{ + ID: id, + Name: userFullname, + Username: username, + Email: email, + RoleID: roleID, + Role: role, + RequirePasswordRenew: nullable.NewNullableWithValue(row.RequirePasswordRenew), + CompanyID: companyID, + Company: company, + SSOID: ssoID, + }, nil +} diff --git a/backend/repository/utils.go b/backend/repository/utils.go new file mode 100644 index 0000000..457a5e4 --- /dev/null +++ b/backend/repository/utils.go @@ -0,0 +1,407 @@ +package repository + +import ( + "context" + "fmt" + "slices" + "strings" + + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var defaultAllowdSearchColumns = []string{ + "name", +} + +var defaultAllowdColumns = map[string]struct{}{ + "name": {}, + "created_at": {}, + "updated_at": {}, +} + +func withOffsetLimit(db *gorm.DB, offset, limit int) *gorm.DB { + if offset == 0 && limit == 0 { + return db + } + return db.Offset(offset).Limit(limit) +} + +func WithOrderBy(db *gorm.DB, orderBy string, desc bool, 
allowed ...string) (*gorm.DB, error) {
+	if orderBy == "" {
+		return db, nil
+	}
+	// if no allowed columns are provided, use the default
+	// else check if the column is allowed
+	if len(allowed) == 0 {
+		if _, ok := defaultAllowdColumns[orderBy]; !ok {
+			return db, fmt.Errorf(
+				"not known or allowed column: %s - allowed: %v",
+				orderBy,
+				defaultAllowdColumns,
+			)
+		}
+	} else {
+		if !slices.Contains(allowed, orderBy) {
+			return db, fmt.Errorf(
+				"not known or allowed column: %s - allowed: %v",
+				orderBy,
+				allowed,
+			)
+		}
+	}
+	// TODO this ruins all indexes performance but is a quick fix to work for all databases
+	// to ensure that the order by is case insensitive
+	// the real solution is to use a case insensitive collation
+	// but these differ per database, another option would be LOWER indexes for some columns however this is also not ideal
+	//orderBy = fmt.Sprintf("LOWER(%s)", orderBy)
+
+	if !desc {
+		return db.Order(orderBy + " COLLATE NOCASE ASC"), nil
+	}
+	return db.Order(orderBy + " COLLATE NOCASE DESC"), nil
+}
+
+func WithOrderByOnTable(db *gorm.DB, table string, orderBy string, desc bool, allowed ...string) (*gorm.DB, error) {
+	if orderBy == "" {
+		return db, nil
+	}
+	// only check default columns if no allowed columns are provided
+	if _, ok := defaultAllowdColumns[orderBy]; !ok && len(allowed) == 0 {
+		return db, fmt.Errorf("invalid column: %s", orderBy)
+	}
+	// when an explicit allow-list is given, the column must be in it
+	if len(allowed) > 0 && !slices.Contains(allowed, orderBy) {
+		return db, fmt.Errorf("invalid column: %s", orderBy)
+	}
+
+	// TODO this ruins all indexes performance but is a quick fix to work for all databases
+	// to ensure that the order by is case insensitive
+	// the real solution is to use a case insensitive collation
+	// but these differ per database, another option would be LOWER indexes for some columns however this is also not ideal
+	if !desc {
+		return db.Order(
+			//fmt.Sprintf("LOWER(`%s`.`%s`) ASC", table, orderBy),
+			fmt.Sprintf("LOWER(`%s`.`%s`) COLLATE NOCASE ASC", table, orderBy),
+		), nil
+	}
+	return db.Order(
+		//fmt.Sprintf("LOWER(`%s`.`%s`) DESC", table, orderBy),
+		fmt.Sprintf("LOWER(`%s`.`%s`) COLLATE NOCASE DESC", table, orderBy),
+	), nil
+}
+
+func assignTableToColumn(table, column string) string {
+	// if the column already contains a dot, it is already formatted
+	if strings.Contains(column, ".") {
+		return column
+	}
+	return fmt.Sprintf("`%s`.`%s`", table, column)
+}
+
+func assignTableToColumns(table string, columns []string) []string {
+	for i, column := range columns {
+		columns[i] = assignTableToColumn(table, column)
+	}
+	return columns
+}
+
+func useQuery(db *gorm.DB, tableName string, q *vo.QueryArgs, allowdColumns ...string) (*gorm.DB, error) {
+	if q == nil {
+		return db, nil
+	}
+	db = withOffsetLimit(db, q.Offset, q.Limit)
+	db, err := WithOrderBy(db, assignTableToColumn(tableName, q.OrderBy), q.Desc, allowdColumns...)
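// For context, callers supply the variadic allowdColumns as a per-table
// allow-list of sortable/searchable columns; a minimal usage sketch,
// mirroring how the webhook repository later in this diff builds and
// passes its list:
//
//	var webhookAllowedColumns = assignTableToColumns(database.WEBHOOK_TABLE, []string{
//		"created_at", "updated_at", "name", "allowed",
//	})
//	db, err := useQuery(db, database.WEBHOOK_TABLE, options.QueryArgs, webhookAllowedColumns...)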
+ if err != nil { + return db, errs.Wrap(err) + } + // handle search + if q.Search != "" { + searchColumns := []string{} + // add columns that are allowed to be searched in + for _, column := range allowdColumns { + searchColumns = append( + searchColumns, + assignTableToColumn(tableName, column), + ) + } + // if no columns has been added, use the default + if len(searchColumns) == 0 { + searchColumns = assignTableToColumns(tableName, defaultAllowdSearchColumns) + } + // remove search symbols + search := strings.ReplaceAll(q.Search, "%", " ") + search = strings.ReplaceAll(search, "_", " ") + // build the LIKE query + var searches []interface{} + q := "" + for i, column := range searchColumns { + if i == 0 { + q += fmt.Sprintf("%s LIKE ?", column) + } else { + q += fmt.Sprintf(" OR %s LIKE ?", column) + } + searches = append(searches, "%"+search+"%") + } + db.Where(q, searches...) + } + return db, errs.Wrap(err) +} + +func useHasNextPage( + db *gorm.DB, + tableName string, + q *vo.QueryArgs, + allowdColumns ...string, +) (bool, error) { + if q == nil { + return false, nil + } + if q.Limit == 0 && q.Offset == 0 { + return false, nil + } + db = withOffsetLimit(db, q.Offset+q.Limit, 1) + db, err := WithOrderBy(db, assignTableToColumn(tableName, q.OrderBy), q.Desc, allowdColumns...) + if err != nil { + return false, errs.Wrap(err) + } + // handle search + if q.Search != "" { + searchColumns := []string{} + // add columns that are allowed to be searched in + for _, column := range allowdColumns { + searchColumns = append( + searchColumns, + assignTableToColumn(tableName, column), + ) + } + // if no columns has been added, use the default + if len(searchColumns) == 0 { + searchColumns = assignTableToColumns(tableName, defaultAllowdSearchColumns) + } + // remove search symbols + search := strings.ReplaceAll(q.Search, "%", " ") + search = strings.ReplaceAll(search, "_", " ") + // build the LIKE query + var searches []interface{} + q := "" + for i, column := range searchColumns { + if i == 0 { + q += fmt.Sprintf("%s LIKE ?", column) + } else { + q += fmt.Sprintf(" OR %s LIKE ?", column) + } + searches = append(searches, "%"+search+"%") + } + db.Where(q, searches...) + } + // Check if there's at least one record + var exists bool + err = db.Select("1").Find(&exists).Error + if errors.Is(err, gorm.ErrRecordNotFound) { + return false, nil + } + if err != nil { + return false, errs.Wrap(err) + } + return exists, nil +} + +/* +func useQueryWithTable(db *gorm.DB, table string, q *vo.QueryArgs, allowdColumns ...string) (*gorm.DB, error) { + if q == nil { + return db, nil + } + db = withOffsetLimit(db, q.Offset, q.Limit) + db, err := WithOrderByOnTable(db, table, q.OrderBy, q.Desc, allowdColumns...) + // handle search + if q.Search != "" { + searchColumns := []string{} + // add columns that are allowed to be searched in + for _, column := range allowdColumns { + if column == "created_at" || column == "updated_at" { + continue + } + searchColumns = append(searchColumns, column) + } + // if no columns has been added, use the default + if len(searchColumns) == 0 { + searchColumns = defaultAllowdSearchColumns + } + // remove search symbols + search := strings.ReplaceAll(q.Search, "%", " ") + search = strings.ReplaceAll(search, "_", " ") + // build the LIKE query + // todo perhaps this needs table prefix also + var searches []interface{} + q := "" + for i, column := range searchColumns { + if i == 0 { + q += column + " LIKE ?" + } else { + q += " OR " + column + " LIKE ?" 
+ } + searches = append(searches, "%"+search+"%") + } + db.Where(q, searches...) + } + return db,errs.Wrap(err) +} +*/ + +func SelectTable(tableName string) []string { + return []string{fmt.Sprintf("`%s`.*", tableName)} +} + +// SelectColumnAs creates a list of columns with aliases column is map[column]alias +func SelectColumnAs(tableName string, columns map[string]string) []string { + var cols []string + for key, value := range columns { + cols = append(cols, fmt.Sprintf("`%s`.`%s` AS %s", tableName, value, key)) + } + return cols +} + +func useSelect(db *gorm.DB, fields []string) *gorm.DB { + if len(fields) == 0 { + return db + } + return db.Select(fields) +} + +func LeftJoinOn(tableLeft, columnLeft, tableRight, columnRight string) string { + return fmt.Sprintf("LEFT JOIN `%s` on `%s`.`%s` = `%s`.`%s`", tableRight, tableLeft, columnLeft, tableRight, columnRight) +} + +func LeftJoinOnWithAlias(tableLeft, columnLeft, tableRight, columnRight, alias string) string { + return fmt.Sprintf("LEFT JOIN `%s` '%s' on `%s`.`%s` = `%s`.`%s`", tableRight, alias, tableLeft, columnLeft, alias, columnRight) +} + +// withCompanyTableContext adds a company context to the query +func withCompanyIncludingNullContext(db *gorm.DB, companyID *uuid.UUID, tableName string) *gorm.DB { + column := fmt.Sprintf("`%s`.company_id", tableName) + if companyID != nil { + return db.Where( + fmt.Sprintf("(%s = ? OR %s IS NULL)", column, column), companyID) + } + return db.Where( + fmt.Sprintf("(%s IS NULL)", column), + ) +} + +// withCompany adds a where company id +// if companyID is NIL it will add a companyID IS NULL +func whereCompany(db *gorm.DB, tableName string, companyID *uuid.UUID) *gorm.DB { + column := fmt.Sprintf("`%s`.company_id", tableName) + if companyID == nil { + return db.Where(fmt.Sprintf("%s IS NULL", column)) + } else { + return db.Where( + fmt.Sprintf("%s = ?", column), companyID) + } +} + +// withCompany adds a where company id is null +func whereCompanyIsNull(db *gorm.DB, tableName string) *gorm.DB { + column := fmt.Sprintf("`%s`.company_id", tableName) + return db.Where( + fmt.Sprintf("%s IS NULL", column)) +} + +// AddTimestamps adds created_at and updated_at to a map +func AddTimestamps(row map[string]interface{}) { + now := utils.NowRFC3339UTC() + row["created_at"] = now + row["updated_at"] = now +} + +// AddCreatedAt adds updated_at to a map +func AddUpdatedAt(row map[string]interface{}) { + row["updated_at"] = utils.NowRFC3339UTC() +} + +// CheckColumnIsUnique checks if a column is unique within a company and globally +// if companyID is nil, it is global no other row should use the name, period. +// if companyID is set, then the same column id should not use it, and no null company ID +// columns is not sqli safe +func CheckColumnIsUnique( + ctx context.Context, + db *gorm.DB, + table string, + column string, + value string, + companyID *uuid.UUID, + currentID *uuid.UUID, // if currentID is set, it is allowed to use the same value +) (bool, error) { + var count int64 + tx := db.Table(table) + + if companyID != nil { + tx = tx.Where(column+" = ? AND (company_id = ? 
OR company_id IS NULL)", value, companyID) + } else { + tx = tx.Where(column+" = ?", value) + } + if currentID != nil { + tx = tx.Where("id != ?", currentID) + } + + result := tx.Count(&count) + if result.Error != nil { + return false, result.Error + } + return count == 0, nil +} + +func UUIDsToStrings(ids []*uuid.UUID) []string { + args := []string{} + for _, s := range ids { + args = append(args, s.String()) + } + return args +} + +// CheckNameIsUnique checks if a name is unique within a company and globally +// if companyID is nil, it is global no other row should use the name, period. +// if companyID is set, then the same company id should not use it, and no null company ID +func CheckNameIsUnique( + ctx context.Context, + db *gorm.DB, + table string, + name string, + companyID *uuid.UUID, + currentID *uuid.UUID, +) (bool, error) { + return CheckColumnIsUnique(ctx, db, table, "name", name, companyID, currentID) +} + +func TableSelect(selects ...string) string { + return strings.Join( + selects, + ",", + ) +} + +func TableColumn(tableName, columnName string) string { + return fmt.Sprintf("`%s`.`%s`", tableName, columnName) +} + +func TableColumnAlias(tableName, columnName, alias string) string { + return fmt.Sprintf("`%s`.`%s` AS `%s`", tableName, columnName, alias) +} + +func TableColumnAll(tableName string) string { + return fmt.Sprintf("`%s`.*", tableName) +} + +func TableColumnID(tableName string) string { + return TableColumn(tableName, "id") +} + +func TableColumnName(tableName string) string { + return TableColumn(tableName, "name") +} diff --git a/backend/repository/webhook.go b/backend/repository/webhook.go new file mode 100644 index 0000000..7a54a52 --- /dev/null +++ b/backend/repository/webhook.go @@ -0,0 +1,204 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var webhookAllowedColumns = assignTableToColumns(database.WEBHOOK_TABLE, []string{ + "created_at", + "updated_at", + "name", + "allowed", +}) + +type WebhookOption struct { + *vo.QueryArgs +} + +type Webhook struct { + DB *gorm.DB +} + +// Insert inserts a new webhook +func (r *Webhook) Insert( + ctx context.Context, + webhook *model.Webhook, +) (*uuid.UUID, error) { + id := uuid.New() + row := webhook.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := r.DB. + Model(&database.Webhook{}). + Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetAll gets all webhooks +func (r *Webhook) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *WebhookOption, +) (*model.Result[model.Webhook], error) { + result := model.NewEmptyResult[model.Webhook]() + db := withCompanyIncludingNullContext(r.DB, companyID, database.WEBHOOK_TABLE) + db, err := useQuery(db, database.WEBHOOK_TABLE, options.QueryArgs, webhookAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + var rows []*database.Webhook + res := db. + Find(&rows) + + if res.Error != nil { + return result, res.Error + } + + hasNextPage, err := useHasNextPage(db, database.WEBHOOK_TABLE, options.QueryArgs, webhookAllowedColumns...) 
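// The probe above reuses the same company scope and query arguments, but
// inside useHasNextPage the window is moved to (Offset+Limit) with a limit
// of 1: if a single row exists past the current page, HasNextPage is set
// without counting the whole table. Illustrative values (not from this
// commit): with Offset 0 and Limit 25 the list query returns rows 0-24 and
// the probe checks whether row 25 exists.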
+ if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, row := range rows { + result.Rows = append(result.Rows, ToWebhook(row)) + } + return result, nil +} + +// GetAllByCompanyID gets all webhooks +func (r *Webhook) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *WebhookOption, +) ([]*model.Webhook, error) { + out := []*model.Webhook{} + db := whereCompany(r.DB, database.WEBHOOK_TABLE, companyID) + db, err := useQuery(db, database.WEBHOOK_TABLE, options.QueryArgs, webhookAllowedColumns...) + if err != nil { + return out, errs.Wrap(err) + } + var rows []*database.Webhook + res := db. + Find(&rows) + + if res.Error != nil { + return out, res.Error + } + for _, row := range rows { + out = append(out, ToWebhook(row)) + } + return out, nil +} + +// GetByID gets a webhook by id +func (r *Webhook) GetByID( + ctx context.Context, + id *uuid.UUID, +) (*model.Webhook, error) { + var row database.Webhook + res := r.DB. + Where( + fmt.Sprintf( + "%s = ?", + TableColumnID(database.WEBHOOK_TABLE), + ), + id.String(), + ). + First(&row) + + if res.Error != nil { + return nil, res.Error + } + + return ToWebhook(&row), nil +} + +// GetByNames gets webhooks by names +func (r *Webhook) GetByName( + ctx context.Context, + name *vo.String127, +) (*model.Webhook, error) { + var row database.Webhook + res := r.DB. + Where( + fmt.Sprintf( + "%s = ?", + TableColumnName(database.WEBHOOK_TABLE), + ), + name.String(), + ). + First(&row) + + if res.Error != nil { + return nil, res.Error + } + + return ToWebhook(&row), nil +} + +// UpdateByID updates a webhook by id +func (r *Webhook) UpdateByID( + ctx context.Context, + id *uuid.UUID, + webhook *model.Webhook, +) error { + row := webhook.ToDBMap() + AddUpdatedAt(row) + + res := r.DB. + Model(&database.Webhook{}). + Where("id = ?", id). + Updates(row) + + return res.Error +} + +// DeleteByID deletes a webhook by id +func (r *Webhook) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + res := r.DB. + Where("id = ?", id). 
+ Delete(&database.Webhook{}) + + return res.Error +} + +func ToWebhook( + row *database.Webhook, +) *model.Webhook { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString127Must(row.Name)) + url := nullable.NewNullableWithValue(*vo.NewString1024Must(row.URL)) + secret := nullable.NewNullableWithValue(*vo.NewOptionalString1024Must(row.Secret)) + + return &model.Webhook{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Name: name, + URL: url, + Secret: secret, + } +} diff --git a/backend/seed/apiSender_dev.go b/backend/seed/apiSender_dev.go new file mode 100644 index 0000000..b3cb427 --- /dev/null +++ b/backend/seed/apiSender_dev.go @@ -0,0 +1,100 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// SeedDevelopmentAPISenders seeds api sender templates +func SeedDevelopmentAPISenders( + apiSenderRepository *repository.APISender, +) error { + apiSenders := []struct { + Name string + APIKey string + CustomField1 string + CustomField2 string + CustomField3 string + CustomField4 string + RequestMethod string + RequestURL string + RequestHeaders string + RequestBody string + ExpectedResponseStatusCode int + ExpectedResponseHeaders string + ExpectedResponseBody string + }{ + { + Name: TEST_API_SENDER_NAME_1, + APIKey: "BAC0N#CH1P5", + CustomField1: "5200", + CustomField2: "ALERT", + CustomField3: "", + CustomField4: "", + RequestMethod: "POST", + RequestURL: "http://api-test-server/api-sender/{{urlEscape .CustomField1}}", + RequestHeaders: "Content-Type: application/json", + RequestBody: "{\"to\": \"{{urlEscape .To}}\", \"from\": \"{{urlEscape .CustomField2}}\", \"content\": \"{{.Content}}\", \"apiKey\": \"{{urlEscape .APIKey}}\" }", + ExpectedResponseStatusCode: 200, + ExpectedResponseHeaders: "Content-Type: application/json", + ExpectedResponseBody: "message sent", + }, + } + for _, apiSender := range apiSenders { + id := nullable.NewNullableWithValue(uuid.New()) + apiKey := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(apiSender.APIKey)) + name := nullable.NewNullableWithValue(*vo.NewString64Must(apiSender.Name)) + customField1 := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(apiSender.CustomField1)) + customField2 := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(apiSender.CustomField2)) + customField3 := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(apiSender.CustomField3)) + customField4 := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(apiSender.CustomField4)) + requestMethod := nullable.NewNullableWithValue(*vo.NewHTTPMethodMust(apiSender.RequestMethod)) + requestURL := nullable.NewNullableWithValue(*vo.NewString255Must(apiSender.RequestURL)) + requestHeaders := nullable.NewNullNullable[model.APISenderHeaders]() + requestBody := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(apiSender.RequestBody)) + expectedResponseStatusCode := nullable.NewNullableWithValue(apiSender.ExpectedResponseStatusCode) + expectedResponseHeaders := nullable.NewNullNullable[model.APISenderHeaders]() + expectedResponseBody 
:= nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(apiSender.ExpectedResponseBody)) + + apiSender := model.APISender{ + ID: id, + APIKey: apiKey, + Name: name, + CustomField1: customField1, + CustomField2: customField2, + CustomField3: customField3, + CustomField4: customField4, + RequestMethod: requestMethod, + RequestURL: requestURL, + RequestHeaders: requestHeaders, + RequestBody: requestBody, + ExpectedResponseStatusCode: expectedResponseStatusCode, + ExpectedResponseHeaders: expectedResponseHeaders, + ExpectedResponseBody: expectedResponseBody, + } + n := apiSender.Name.MustGet() + a, err := apiSenderRepository.GetByName(context.TODO(), &n, nil, nil) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if a != nil { + continue + } + _, err = apiSenderRepository.Insert(context.TODO(), &apiSender) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/campaignTemplate_dev.go b/backend/seed/campaignTemplate_dev.go new file mode 100644 index 0000000..d5620cb --- /dev/null +++ b/backend/seed/campaignTemplate_dev.go @@ -0,0 +1,254 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// SeedDevelopmentCampaignTemplates seeds templates +func SeedDevelopmentCampaignTemplates( + apiRepository *repository.APISender, + domainRepository *repository.Domain, + pageRepository *repository.Page, + emailRepository *repository.Email, + smtpRepository *repository.SMTPConfiguration, + templateRepository *repository.CampaignTemplate, + identifierRepository *repository.Identifier, +) error { + templates := []struct { + Name string + DomainName string + LandingPageName string + LandingPageTypeName string + BeforeLandingPageName string + BeforeLandingPageTypeName string + AfterLandingPageName string + AfterLandingPageTypeName string + AfterLandingPageRedirectURL string + EmailName string + SMTPConfigName string + APISenderName string + UrlIdentifierName string + StateIdentifierName string + }{ + { + Name: "Phishing", + DomainName: TEST_DOMAIN_NAME_1, + LandingPageName: TEST_PAGE_NAME_1, + LandingPageTypeName: data.PAGE_TYPE_LANDING, + EmailName: TEST_EMAIL_NAME_1, + SMTPConfigName: TEST_SMTP_CONFIGURATION_NAME_1, + UrlIdentifierName: TEST_URL_IDENTIFIER_NAME, + StateIdentifierName: TEST_STATE_IDENTIFIER_NAME, + }, + { + Name: "Test API - Forgot password", + DomainName: TEST_DOMAIN_NAME_1, + LandingPageName: TEST_PAGE_NAME_1, + LandingPageTypeName: data.PAGE_TYPE_LANDING, + EmailName: TEST_EMAIL_NAME_1, + APISenderName: TEST_API_SENDER_NAME_1, + UrlIdentifierName: TEST_URL_IDENTIFIER_NAME, + StateIdentifierName: TEST_STATE_IDENTIFIER_NAME, + }, + } + for _, template := range templates { + urlIdentifier, err := identifierRepository.GetByName( + context.Background(), + template.UrlIdentifierName, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if urlIdentifier == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "URL param key not found") + } + stateKeyIdentifier, err := identifierRepository.GetByName( + 
context.Background(), + template.StateIdentifierName, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if stateKeyIdentifier == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "state param key not found") + } + domainName, err := vo.NewString255(template.DomainName) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + domain, err := domainRepository.GetByName( + context.Background(), + domainName, + &repository.DomainOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if domain == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "domain not found") + } + landingPageName, err := vo.NewString64(template.LandingPageName) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + var beforeLandingPageID nullable.Nullable[uuid.UUID] + if template.BeforeLandingPageName != "" { + beforeLandingPageName, err := vo.NewString64(template.BeforeLandingPageName) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + beforeLandingPage, err := pageRepository.GetByNameAndCompanyID( + context.Background(), + beforeLandingPageName, + nil, + &repository.PageOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if beforeLandingPage == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "before landing page not found") + } + beforeLandingPageID = beforeLandingPage.ID + } + var afterLandingPageUUID nullable.Nullable[uuid.UUID] + if template.AfterLandingPageName != "" { + afterLandingPageName, err := vo.NewString64(template.AfterLandingPageName) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + afterLandingPage, err := pageRepository.GetByNameAndCompanyID( + context.Background(), + afterLandingPageName, + nil, + &repository.PageOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if afterLandingPage == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "before landing page not found") + } + afterLandingPageUUID = afterLandingPage.ID + } + + landingPage, err := pageRepository.GetByNameAndCompanyID( + context.Background(), + landingPageName, + nil, + &repository.PageOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if landingPage == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "landing page not found") + } + t, err := templateRepository.GetByNameAndCompanyID( + context.Background(), + template.Name, + nil, + &repository.CampaignTemplateOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if t != nil { + continue + } + emailName, err := vo.NewString64(template.EmailName) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + email, err := emailRepository.GetByNameAndCompanyID( + context.Background(), + emailName, + nil, + &repository.EmailOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if email == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "email not found") + } + var 
smtpConfigID nullable.Nullable[uuid.UUID] + if template.SMTPConfigName != "" { + smtpConfigurationName, err := vo.NewString127(template.SMTPConfigName) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + smtpConfiguration, err := smtpRepository.GetByNameAndCompanyID( + context.Background(), + smtpConfigurationName, + nil, + &repository.SMTPConfigurationOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if smtpConfiguration == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "smtp configuration not found") + } + smtpConfigID = smtpConfiguration.ID + } + var apiSenderID nullable.Nullable[uuid.UUID] + if template.APISenderName != "" { + APISenderName, err := vo.NewString64(template.APISenderName) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + apiSender, err := apiRepository.GetByName( + context.Background(), + APISenderName, + nil, + &repository.APISenderOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if apiSender == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "API sender not found") + } + apiSenderID = apiSender.ID + } + id := nullable.NewNullableWithValue(uuid.New()) + name := nullable.NewNullableWithValue(*vo.NewString64Must(template.Name)) + domainID := nullable.NewNullableWithValue(domain.ID.MustGet()) + landingPageID := nullable.NewNullableWithValue(landingPage.ID.MustGet()) + afterLandingPageRedirectURL := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(template.AfterLandingPageRedirectURL)) + + createTemplate := model.CampaignTemplate{ + ID: id, + Name: name, + DomainID: domainID, + LandingPageID: landingPageID, + BeforeLandingPageID: beforeLandingPageID, + AfterLandingPageID: afterLandingPageUUID, + AfterLandingPageRedirectURL: afterLandingPageRedirectURL, + EmailID: email.ID, + SMTPConfigurationID: smtpConfigID, + APISenderID: apiSenderID, + URLIdentifierID: nullable.NewNullableWithValue(urlIdentifier.ID.MustGet()), + StateIdentifierID: nullable.NewNullableWithValue(stateKeyIdentifier.ID.MustGet()), + } + _, err = templateRepository.Insert(context.TODO(), &createTemplate) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/company_dev.go b/backend/seed/company_dev.go new file mode 100644 index 0000000..d4b0e70 --- /dev/null +++ b/backend/seed/company_dev.go @@ -0,0 +1,73 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/brianvoe/gofakeit/v7" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +type DevelopmentCompany struct { + Name string +} + +// SeedDevelopmentCompanies seeds companies +func SeedDevelopmentCompanies( + companyRepository *repository.Company, + faker *gofakeit.Faker, +) error { + companies := []DevelopmentCompany{ + { + Name: TEST_COMPANY_NAME_1, + }, + { + Name: TEST_COMPANY_NAME_2, + }, + { + Name: TEST_COMPANY_NAME_3, + }, + { + Name: TEST_COMPANY_NAME_4, + }, + { + Name: TEST_COMPANY_NAME_5, + }, + } + // add random companies + for i := 0; i < 10; i++ { + companies = append(companies, DevelopmentCompany{ + Name: 
faker.Company(), + }) + } + for _, company := range companies { + id := nullable.NewNullableWithValue(uuid.New()) + n := vo.NewString64Must(company.Name) + name := nullable.NewNullableWithValue(*n) + createCompany := model.Company{ + ID: id, + Name: name, + } + c, err := companyRepository.GetByName(context.Background(), company.Name) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if c != nil { + continue + } + _, err = companyRepository.Insert(context.TODO(), &createCompany) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/domain_dev.go b/backend/seed/domain_dev.go new file mode 100644 index 0000000..8fc4f51 --- /dev/null +++ b/backend/seed/domain_dev.go @@ -0,0 +1,119 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/brianvoe/gofakeit/v7" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +type DevelopmentDomain struct { + Name string + CompanyID string +} + +func SeedDevelopmentDomains( + domainRepository *repository.Domain, + companyRepository *repository.Company, + faker *gofakeit.Faker, +) error { + domains := []DevelopmentDomain{ + { + Name: TEST_DOMAIN_NAME_1, + }, + { + Name: TEST_DOMAIN_NAME_2, + }, + { + Name: TEST_DOMAIN_NAME_3, + }, + { + Name: TEST_DOMAIN_NAME_4, + }, + } + // random domains + for i := 0; i < 10; i++ { + domains = append(domains, DevelopmentDomain{ + Name: faker.DomainName() + ".test", + }) + } + err := createDevelopmentDomains(domains, faker, domainRepository) + if err != nil { + return errors.Errorf("failed to seed development domains: %w", err) + } + // random domains attached to companies + err = forEachDevelopmentCompany(companyRepository, func(company *model.Company) error { + domains := []DevelopmentDomain{} + for i := 0; i < 10; i++ { + domains = append(domains, DevelopmentDomain{ + Name: "phishing.club." 
+ gofakeit.DomainName() + ".test", + CompanyID: company.ID.MustGet().String(), + }) + } + err := createDevelopmentDomains(domains, faker, domainRepository) + if err != nil { + return errors.Errorf("failed to seed development domains: %w", err) + } + return nil + }) + if err != nil { + return errors.Errorf("failed to seed development domains: %w", err) + } + return nil +} + +func createDevelopmentDomains( + domains []DevelopmentDomain, + faker *gofakeit.Faker, + domainRepository *repository.Domain, +) error { + for _, domain := range domains { + id := nullable.NewNullableWithValue(uuid.New()) + name := nullable.NewNullableWithValue(*vo.NewString255Must(domain.Name)) + managedTLS := nullable.NewNullableWithValue(true) + hostWebsite := nullable.NewNullableWithValue(true) + pageContent := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(faker.HackerPhrase())) + pageNotFoundContent := nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust("not found - " + faker.HipsterSentence(5))) + redirectURL := nullable.NewNullableWithValue(*vo.NewOptionalString1024Must("")) + + createDomain := model.Domain{ + ID: id, + Name: name, + ManagedTLS: managedTLS, + HostWebsite: hostWebsite, + PageContent: pageContent, + PageNotFoundContent: pageNotFoundContent, + RedirectURL: redirectURL, + } + if domain.CompanyID != "" { + createDomain.CompanyID = nullable.NewNullableWithValue(uuid.MustParse(domain.CompanyID)) + } + domainName := createDomain.Name.MustGet() + d, err := domainRepository.GetByName( + context.Background(), + &domainName, + &repository.DomainOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if d != nil { + continue + } + _, err = domainRepository.Insert(context.TODO(), &createDomain) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/email_dev.go b/backend/seed/email_dev.go new file mode 100644 index 0000000..2513edf --- /dev/null +++ b/backend/seed/email_dev.go @@ -0,0 +1,122 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/brianvoe/gofakeit/v7" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +type DevelopmentEmail struct { + Name string + MailEnvelopeFrom string + MailFrom string + Subject string + Content string + CompanyID string +} + +func SeedDevelopmentEmails( + emailRepository *repository.Email, + companyRepository *repository.Company, + faker *gofakeit.Faker, +) error { + // known test data + emails := []DevelopmentEmail{ + { + Name: TEST_EMAIL_NAME_1, + Subject: "Welcome to The Phishing Club", + MailEnvelopeFrom: "envelope-sender@phish.internal", + MailFrom: "Fresh Fish ", + Content: "Hi {{.FirstName}} Welcome to The Phishing Club! We are excited to have you here. Click here to get started.", + }, + } + for i := 0; i < 10; i++ { + envelopeFrom := faker.Email() + emails = append(emails, DevelopmentEmail{ + Name: faker.ProductName(), + Subject: faker.Sentence(4), + MailEnvelopeFrom: envelopeFrom, + MailFrom: faker.Name() + " <" + envelopeFrom + ">", + Content: "Hi {{.FirstName}}
" + faker.Sentence(10) + "
Click here to get started.", + }) + } + err := createDevelopmentEmails(emails, emailRepository, nil) + if err != nil { + return errors.Errorf("failed to seed development emails: %w", err) + } + // random emails attached to companies + err = forEachDevelopmentCompany(companyRepository, func(company *model.Company) error { + emails := []DevelopmentEmail{} + companyID := company.ID.MustGet() + for i := 0; i < 10; i++ { + envelopeFrom := faker.Email() + emails = append(emails, DevelopmentEmail{ + Name: faker.ProductName(), + Subject: faker.Sentence(4), + MailEnvelopeFrom: envelopeFrom, + MailFrom: faker.Name() + " <" + envelopeFrom + ">", + Content: "Hi {{.FirstName}}
" + faker.Sentence(10) + "
Click here to get started.", + CompanyID: companyID.String(), + }) + } + err := createDevelopmentEmails(emails, emailRepository, &companyID) + if err != nil { + return errors.Errorf("failed to seed development emails: %w", err) + } + return nil + }) + if err != nil { + return errors.Errorf("failed to seed development emails: %w", err) + } + return nil +} + +func createDevelopmentEmails( + emails []DevelopmentEmail, + emailRepository *repository.Email, + companyID *uuid.UUID, +) error { + for _, email := range emails { + name := vo.NewString64Must(email.Name) + createEmail := model.Email{ + ID: nullable.NewNullableWithValue(uuid.New()), + Name: nullable.NewNullableWithValue(*name), + MailEnvelopeFrom: nullable.NewNullableWithValue(*vo.NewMailEnvelopeFromMust(email.MailEnvelopeFrom)), + MailHeaderSubject: nullable.NewNullableWithValue(*vo.NewOptionalString255Must(email.Subject)), + MailHeaderFrom: nullable.NewNullableWithValue(*vo.NewEmailMust(email.MailFrom)), + AddTrackingPixel: nullable.NewNullableWithValue(true), + Content: nullable.NewNullableWithValue(*vo.NewOptionalString1MBMust(email.Content + "{{.Tracker}}")), + } + if email.CompanyID != "" { + createEmail.CompanyID = nullable.NewNullableWithValue(uuid.MustParse(email.CompanyID)) + } + m, err := emailRepository.GetByNameAndCompanyID( + context.Background(), + name, + companyID, + &repository.EmailOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if m != nil { + continue + } + _, err = emailRepository.Insert(context.TODO(), &createEmail) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/events.go b/backend/seed/events.go new file mode 100644 index 0000000..ac4f706 --- /dev/null +++ b/backend/seed/events.go @@ -0,0 +1,57 @@ +package seed + +import ( + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "gorm.io/gorm" +) + +func SeedEvents( + db *gorm.DB, +) error { + for _, event := range data.Events { + dbEvent := &database.Event{} + res := db.Where("name = ?", event).First(dbEvent) + if res.Error != nil && !errors.Is(res.Error, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, res.Error) + } + if dbEvent.ID != nil { + err := cacheEvent(dbEvent.ID, event) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + continue + } + // put in in the db + id := uuid.New() + res = db.Create(&database.Event{ + ID: &id, + Name: event, + }) + if res.Error != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, res.Error) + } + err := cacheEvent(&id, event) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} + +func cacheEvent( + eventID *uuid.UUID, + name string, +) error { + // the ids are stored in memory for quick access + // never stored in the database + cache.EventIDByName[name] = eventID + cache.EventNameByID[eventID.String()] = name + + return nil +} diff --git a/backend/seed/identifier.go b/backend/seed/identifier.go new file mode 100644 index 0000000..486f671 --- /dev/null +++ b/backend/seed/identifier.go @@ -0,0 +1,67 @@ +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/oapi-codegen/nullable" + 
"github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "gorm.io/gorm" +) + +func SeedIdentifiers( + db *gorm.DB, + identifierRepository *repository.Identifier, +) error { + ids := []model.Identifier{ + {Name: nullable.NewNullableWithValue("action")}, + {Name: nullable.NewNullableWithValue("category")}, + {Name: nullable.NewNullableWithValue("categoryId")}, + {Name: nullable.NewNullableWithValue("context")}, + {Name: nullable.NewNullableWithValue("data")}, + {Name: nullable.NewNullableWithValue("filter")}, + {Name: nullable.NewNullableWithValue("id")}, + {Name: nullable.NewNullableWithValue("item")}, + {Name: nullable.NewNullableWithValue("key")}, + {Name: nullable.NewNullableWithValue("p")}, + {Name: nullable.NewNullableWithValue("page")}, + {Name: nullable.NewNullableWithValue("pageId")}, + {Name: nullable.NewNullableWithValue("param")}, + {Name: nullable.NewNullableWithValue("q")}, + {Name: nullable.NewNullableWithValue("ref")}, + {Name: nullable.NewNullableWithValue("s")}, + {Name: nullable.NewNullableWithValue("search")}, + {Name: nullable.NewNullableWithValue("session")}, + {Name: nullable.NewNullableWithValue("sessionId")}, + {Name: nullable.NewNullableWithValue("state")}, + {Name: nullable.NewNullableWithValue("state")}, + {Name: nullable.NewNullableWithValue("token")}, + {Name: nullable.NewNullableWithValue("type")}, + {Name: nullable.NewNullableWithValue("url")}, + {Name: nullable.NewNullableWithValue("userId")}, + } + for _, identifier := range ids { + // check if the entry already exists + m, err := identifierRepository.GetByName( + context.Background(), + identifier.Name.MustGet(), + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if m != nil { + continue + } + _, err = identifierRepository.Insert( + context.Background(), + &identifier, + ) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/migrate.go b/backend/seed/migrate.go new file mode 100644 index 0000000..d54aeed --- /dev/null +++ b/backend/seed/migrate.go @@ -0,0 +1,268 @@ +package seed + +import ( + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/app" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// InitialInstallAndSeed installs the initial database migrations +func initialInstallAndSeed( + db *gorm.DB, + repositories *app.Repositories, + logger *zap.SugaredLogger, + usingSystemd bool, +) error { + tables := []any{ + &database.Asset{}, + &database.Option{}, + &database.Company{}, + &database.APISender{}, + &database.APISenderHeader{}, + &database.Role{}, + &database.User{}, + &database.Session{}, + &database.Recipient{}, + &database.RecipientGroup{}, + &database.RecipientGroupRecipient{}, + &database.Domain{}, + &database.Page{}, + &database.SMTPHeader{}, + &database.SMTPConfiguration{}, + &database.Email{}, + &database.CampaignTemplate{}, + &database.Campaign{}, + &database.CampaignRecipientGroup{}, + &database.CampaignRecipient{}, + &database.Event{}, + &database.CampaignEvent{}, + &database.Attachment{}, + &database.EmailAttachment{}, + &database.AllowDeny{}, + &database.CampaignAllowDeny{}, + 
&database.Webhook{}, + &database.Identifier{}, + &database.CampaignStats{}, + } + // create tables + logger.Debug("migrating tables") + err := db.AutoMigrate( + tables..., + ) + if err != nil { + return errs.Wrap( + errors.Errorf("failed to migrate database: %w", err), + ) + } + for _, table := range tables { + t, ok := table.(database.Migrater) + if !ok { + // logger.Debugw("table has no extra migration", "table", table) + continue + } + // logger.Debugw("running extra migration for table", "table", table) + err := t.Migrate(db) + if err != nil { + return errs.Wrap( + errors.Errorf("failed to run extra migration for table %T: %w", table, err), + ) + } + } + // seed settings levels default values + err = SeedSettings(db, usingSystemd) + if err != nil { + return errs.Wrap( + errors.Errorf("failed to seed log levels: %w", err), + ) + } + // seed user roles + err = SeedRoles(repositories.Role) + if err != nil { + return errs.Wrap( + errors.Errorf("failed to seed roles: %w", err), + ) + } + // seed events + err = SeedEvents(db) + if err != nil { + return errs.Wrap( + errors.Errorf("failed to seed events: %w", err), + ) + } + // seed identifiers + err = SeedIdentifiers(db, repositories.Identifier) + if err != nil { + return errs.Wrap( + errors.Errorf("failed to seed identifiers: %w", err), + ) + } + return nil +} + +func SeedSettings( + db *gorm.DB, + usingSystemd bool, +) error { + // seed log levels + if build.Flags.Production { + err := seedLogLevels(db, "info", "silent") + if err != nil { + return err + } + } else { + err := seedLogLevels(db, "info", "info") + if err != nil { + return err + } + } + { + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). + Where("key = ?", data.OptionKeyDBLogLevel). + Count(&c) + + if res.Error != nil { + return errs.Wrap(res.Error) + } + if c == 0 { + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyDBLogLevel, + Value: "info", + }) + if res.Error != nil { + return errs.Wrap(res.Error) + } + } + } + // seed max file size + { + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). + Where("key = ?", data.OptionKeyMaxFileUploadSizeMB). + Count(&c) + + if res.Error != nil { + return errs.Wrap(res.Error) + } + if c == 0 { + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyMaxFileUploadSizeMB, + Value: data.OptionValueKeyMaxFileUploadSizeMBDefault, + }) + if res.Error != nil { + return errs.Wrap(res.Error) + } + } + } + { + // seed repeat offender threshold + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). + Where("key = ?", data.OptionKeyRepeatOffenderMonths). + Count(&c) + + if res.Error != nil { + return errs.Wrap(res.Error) + } + if c == 0 { + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyRepeatOffenderMonths, + Value: "12", // Default to 12 months + }) + if res.Error != nil { + return errs.Wrap(res.Error) + } + } + } + { + // seed sso option + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). + Where("key = ?", data.OptionKeyAdminSSOLogin). + Count(&c) + + if res.Error != nil { + return errs.Wrap(res.Error) + } + if c == 0 { + v, err := model.NewSSOOptionDefault().ToJSON() + if err != nil { + return errs.Wrap(err) + } + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyAdminSSOLogin, + Value: string(v), + }) + if res.Error != nil { + return errs.Wrap(res.Error) + } + } + } + { + // seed using systemd option + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). 
+ Where("key = ?", data.OptionKeyUsingSystemd). + Count(&c) + + if res.Error != nil { + return errs.Wrap(res.Error) + } + isUsingSystemdStr := data.OptionValueUsingSystemdYes + if !usingSystemd { + isUsingSystemdStr = data.OptionValueUsingSystemdNo + } + if c == 0 { + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyUsingSystemd, + Value: isUsingSystemdStr, + }) + if res.Error != nil { + return errs.Wrap(res.Error) + } + } + } + return nil +} + +// Migration approach +func migrate(db *gorm.DB) error { + // First add column as nullable + if err := db.Exec(`ALTER TABLE attachments ADD COLUMN embedded_content BOOLEAN`).Error; err != nil { + return errs.Wrap(err) + } + + // Update existing rows + if err := db.Exec(`UPDATE attachments SET embedded_content = false`).Error; err != nil { + return errs.Wrap(err) + } + + // Then make it not nullable + if err := db.Exec(`ALTER TABLE attachments MODIFY COLUMN embedded_content BOOLEAN NOT NULL DEFAULT false`).Error; err != nil { + return errs.Wrap(err) + } + + return nil +} diff --git a/backend/seed/migrate_dev.go b/backend/seed/migrate_dev.go new file mode 100644 index 0000000..34e8623 --- /dev/null +++ b/backend/seed/migrate_dev.go @@ -0,0 +1,180 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/brianvoe/gofakeit/v7" + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/app" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "go.uber.org/zap" + "gorm.io/gorm" +) + +const ( + // test company + TEST_COMPANY_NAME_1 = "Phish Security" + TEST_COMPANY_NAME_2 = "Phish Yellowgrass Seeds" + TEST_COMPANY_NAME_3 = "Phish FTW IT" + TEST_COMPANY_NAME_4 = "Phish Bakery Bites" + TEST_COMPANY_NAME_5 = "Phish Club" + + // test pages + TEST_PAGE_NAME_1 = "Login M365" + + // test names + TEST_EMAIL_NAME_1 = "Validate Account" + + // test domains + TEST_DOMAIN_NAME_1 = "phishing.club.microsoft.test" + TEST_DOMAIN_NAME_2 = "phishing.club.google.test" + TEST_DOMAIN_NAME_3 = "phishing.club.vikings.test" + TEST_DOMAIN_NAME_4 = "phishing.club.dark-water.test" + + // test recipients + TEST_RECIPIENT_EMAIL_1 = "alice@black-boat.test" + TEST_RECIPIENT_EMAIL_2 = "bob@black-boat.test" + TEST_RECIPIENT_EMAIL_3 = "mallory@black-boat.test" + TEST_RECIPIENT_EMAIL_4 = "vicky@black-boat.test" + + // test recipient groups + TEST_RECIPIENT_GROUP_NAME_1 = "Management" + TEST_RECIPIENT_GROUP_NAME_2 = "Marketing" + + // test smtp configurations + TEST_SMTP_CONFIGURATION_NAME_1 = "Development" + + // test url param keys + TEST_URL_IDENTIFIER_NAME = "id" + + // test cookie param keys + TEST_STATE_IDENTIFIER_NAME = "p" + + // api senders + TEST_API_SENDER_NAME_1 = "Test API" +) + +// InitialInstallAndSeed installs the initial database migrations +func InitialInstallAndSeed( + db *gorm.DB, + repositories *app.Repositories, + logger *zap.SugaredLogger, + usingSystemd bool, +) error { + err := initialInstallAndSeed(db, repositories, logger, usingSystemd) + if err != nil { + logger.Fatalw("failed to seed database", "error", err) + return err + } + err = RunSeedDevelopmentData(repositories, db, logger) + if err != nil { + logger.Fatalw("Failed to seed development data", "error", err) + return err + } + return nil +} + +// RunSeedDevelopmentData seeds development data +func RunSeedDevelopmentData( + repositories *app.Repositories, + db *gorm.DB, 
+ logger *zap.SugaredLogger, +) error { + // check if seeded option is set + option, err := repositories.Option.GetByKey(context.TODO(), "development_seeded") + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errs.Wrap(err) + } + if option == nil { + logger.Info("Creating development data") + // TODO add persisted option to skip seeding + gofakeit.Seed(1337) // make the fake create the same data every time + err = SeedDevelopmentData(db, repositories, gofakeit.GlobalFaker) + if err != nil { + return errors.Errorf("seed error: %w", err) + } + // set seeded option + id := uuid.New() + optSeedDev := &model.Option{ + ID: nullable.NewNullableWithValue(id), + Key: *vo.NewString64Must(data.OptionKeyDevelopmentSeeded), + Value: *vo.NewOptionalString1MBMust(data.OptionValueSeeded), + } + _, err := repositories.Option.Insert(context.TODO(), optSeedDev) + if err != nil { + return errors.Errorf("failed to insert seeded option: %w", err) + } + logger.Info("Finished creating development data") + } + return nil +} + +func SeedDevelopmentData( + db *gorm.DB, + repositories *app.Repositories, + faker *gofakeit.Faker, +) error { + var err error + err = SeedDevelopmentCompanies(repositories.Company, faker) + if err != nil { + return errors.Errorf("failed to seed development companies: %w", err) + } + err = SeedDevelopmentDomains(repositories.Domain, repositories.Company, faker) + if err != nil { + return errors.Errorf("failed to seed development domains: %w", err) + } + err = SeedDevelopmentEmails(repositories.Email, repositories.Company, faker) + if err != nil { + return errors.Errorf("failed to seed development messages: %w", err) + } + err = SeedDevelopmentPages(repositories.Page, repositories.Company, faker) + if err != nil { + return errors.Errorf("failed to seed development pages: %w", err) + } + err = SeedDevelopmentSMTPConfiguration(repositories.SMTPConfiguration) + if err != nil { + return errors.Errorf("failed to seed development smtp configurations: %w", err) + } + err = SeedDevelopmentRecipients(repositories.Recipient, repositories.Company, faker) + if err != nil { + return errors.Errorf("failed to seed development recipients: %w", err) + } + err = SeedDevelopmentRecipientGroups( + faker, + repositories.Company, + repositories.Recipient, + repositories.RecipientGroup, + ) + if err != nil { + return errors.Errorf("failed to seed development recipient groups: %w", err) + } + err = SeedDevelopmentAPISenders( + repositories.APISender, + ) + if err != nil { + return errors.Errorf("failed to seed development api senders: %w", err) + } + err = SeedDevelopmentCampaignTemplates( + repositories.APISender, + repositories.Domain, + repositories.Page, + repositories.Email, + repositories.SMTPConfiguration, + repositories.CampaignTemplate, + repositories.Identifier, + ) + if err != nil { + return errors.Errorf("failed to seed development templates: %w", err) + } + if err := SeedDevelopmentWebhooks(repositories.Webhook); err != nil { + return errors.Errorf("failed to seed development webhooks: %w", err) + } + return nil +} diff --git a/backend/seed/migrate_prod.go b/backend/seed/migrate_prod.go new file mode 100644 index 0000000..88de424 --- /dev/null +++ b/backend/seed/migrate_prod.go @@ -0,0 +1,19 @@ +//go:build !dev + +package seed + +import ( + "github.com/phishingclub/phishingclub/app" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// InitialInstallAndSeed installs the initial database migrations +func InitialInstallAndSeed( + db *gorm.DB, + repositories *app.Repositories, + logger 
*zap.SugaredLogger, + usingSystemd bool, +) error { + return initialInstallAndSeed(db, repositories, logger, usingSystemd) +} diff --git a/backend/seed/options.go b/backend/seed/options.go new file mode 100644 index 0000000..8b2a026 --- /dev/null +++ b/backend/seed/options.go @@ -0,0 +1,61 @@ +package seed + +import ( + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "gorm.io/gorm" +) + +func seedLogLevels( + db *gorm.DB, + logLevel string, + dbLogLevel string, +) error { + // seed log levels + { + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). + Where("key = ?", data.OptionKeyLogLevel). + Count(&c) + + if res.Error != nil { + return res.Error + } + if c == 0 { + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyLogLevel, + Value: logLevel, + }) + if res.Error != nil { + return res.Error + } + } + } + { + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). + Where("key = ?", data.OptionKeyDBLogLevel). + Count(&c) + + if res.Error != nil { + return res.Error + } + if c == 0 { + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyDBLogLevel, + Value: dbLogLevel, + }) + if res.Error != nil { + return res.Error + } + } + } + return nil +} diff --git a/backend/seed/page_dev.go b/backend/seed/page_dev.go new file mode 100644 index 0000000..f04b087 --- /dev/null +++ b/backend/seed/page_dev.go @@ -0,0 +1,124 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/brianvoe/gofakeit/v7" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +const developmentHtmlInput = ` +

+
+ + + +
+` + +type DevelopmentPage struct { + Name string + Content string + CompanyID string +} + +// SeedDevelopmentPages seeds pages +func SeedDevelopmentPages( + pageRepository *repository.Page, + companyRepository *repository.Company, + faker *gofakeit.Faker, +) error { + // add dev pages + pages := []DevelopmentPage{ + { + Name: TEST_PAGE_NAME_1, + Content: "Welcome to the Phishing Club" + developmentHtmlInput, + }, + } + err := createDevelopmentPages(pages, pageRepository, nil) + if err != nil { + return errors.Errorf("failed to seed development pages: %w", err) + } + // random pages + for i := 0; i < 10; i++ { + pages = append(pages, DevelopmentPage{ + Name: faker.ProductName(), + Content: faker.Sentence(50) + developmentHtmlInput, + }) + } + err = createDevelopmentPages(pages, pageRepository, nil) + if err != nil { + return errors.Errorf("failed to seed development pages: %w", err) + } + // random pages attached to companies + err = forEachDevelopmentCompany(companyRepository, func(company *model.Company) error { + pages := []DevelopmentPage{} + companyID := company.ID.MustGet() + for i := 0; i < 10; i++ { + pages = append(pages, DevelopmentPage{ + Name: faker.ProductName(), + Content: faker.Sentence(50) + developmentHtmlInput, + CompanyID: companyID.String(), + }) + } + err := createDevelopmentPages(pages, pageRepository, &companyID) + if err != nil { + return errors.Errorf("failed to seed development pages: %w", err) + } + return nil + }) + if err != nil { + return errors.Errorf("failed to seed development pages: %w", err) + } + return nil +} + +func createDevelopmentPages( + pages []DevelopmentPage, + pageRepository *repository.Page, + companyID *uuid.UUID, +) error { + for _, page := range pages { + id := uuid.New() + name := vo.NewString64Must(page.Name) + content := vo.NewOptionalString1MBMust(page.Content) + nullableCompanyID := nullable.NewNullNullable[uuid.UUID]() + if page.CompanyID != "" { + cid := uuid.MustParse(page.CompanyID) + nullableCompanyID.Set(cid) + } + createPage := model.Page{ + ID: nullable.NewNullableWithValue(id), + Name: nullable.NewNullableWithValue(*name), + Content: nullable.NewNullableWithValue(*content), + CompanyID: nullableCompanyID, + } + p, err := pageRepository.GetByNameAndCompanyID( + context.Background(), + name, + companyID, + &repository.PageOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if p != nil { + continue + } + _, err = pageRepository.Insert(context.TODO(), &createPage) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/recipient_dev.go b/backend/seed/recipient_dev.go new file mode 100644 index 0000000..3a4ac3d --- /dev/null +++ b/backend/seed/recipient_dev.go @@ -0,0 +1,389 @@ +//go:build dev + +package seed + +import ( + "context" + "strings" + + "github.com/go-errors/errors" + + "github.com/brianvoe/gofakeit/v7" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +type DevelopmentRecipient struct { + FirstName string + LastName string + Email string + Department string + Position string + ExtraIdentifier string + Phone string + City string + Country string + Misc string + CompanyID string +} + +// tcmpRecpMap holds the company name and the recipient 
emails +// so it can later be added to different groups +// map[companyID][]recpUUID +var tmpRecpMap = map[string][]*uuid.UUID{} + +func SeedDevelopmentRecipients( + recipientRepository *repository.Recipient, + companyRepository *repository.Company, + faker *gofakeit.Faker, +) error { + // add known recipients + recipients := []DevelopmentRecipient{ + { + Email: TEST_RECIPIENT_EMAIL_1, + FirstName: "Alice", + LastName: "", + Department: "Economy", + }, + { + Email: TEST_RECIPIENT_EMAIL_2, + FirstName: "Bob", + LastName: "", + Department: "Economy", + }, + { + + Email: TEST_RECIPIENT_EMAIL_3, + FirstName: "Mallory", + LastName: "", + Department: "Marketing", + }, + { + Email: TEST_RECIPIENT_EMAIL_4, + FirstName: "Vickey", + LastName: "", + Department: "Marketing", + }, + } + // add random recipients + country := faker.Country() + domain := faker.DomainName() + jobLevels := []string{} + for i := 0; i < 3; i++ { + jobLevels = append(jobLevels, faker.JobLevel()) + } + for i := 0; i < 10; i++ { + firstName := faker.FirstName() + lastName := faker.LastName() + jobLevel := jobLevels[faker.Number(0, len(jobLevels)-1)] + emailPrefix := strings.ToLower(strings.Join(strings.Split(firstName, " "), ".")) + recipients = append(recipients, DevelopmentRecipient{ + Email: emailPrefix + "@" + domain, + FirstName: firstName, + LastName: lastName, + Department: jobLevel, + Position: faker.JobTitle(), + City: faker.City(), + Phone: faker.Phone(), + Country: country, + Misc: faker.Sentence(10), + }) + } + err := createDevelopmentRecipients(recipients, recipientRepository) + if err != nil { + return errors.Errorf("failed to seed development recipients: %w", err) + } + // random recipients attached to companies + err = forEachDevelopmentCompany(companyRepository, func(company *model.Company) error { + recipients := []DevelopmentRecipient{} + country = faker.Country() + domain := faker.DomainName() + jobLevels := []string{} + for i := 0; i < 3; i++ { + jobLevels = append(jobLevels, faker.JobLevel()) + } + for i := 0; i < 10; i++ { + jobLevel := jobLevels[faker.Number(0, len(jobLevels)-1)] + //emailPrefix := faker.Username() + firstName := faker.FirstName() + lastName := faker.LastName() + emailPrefix := strings.ToLower(strings.Join(strings.Split(firstName, " "), ".")) + country := faker.Country() + recipients = append(recipients, DevelopmentRecipient{ + Email: emailPrefix + "@" + domain, + FirstName: firstName, + LastName: lastName, + Department: jobLevel, + Position: faker.JobTitle(), + City: faker.City(), + Country: country, + CompanyID: company.ID.MustGet().String(), + }) + } + err := createDevelopmentRecipients(recipients, recipientRepository) + if err != nil { + return errors.Errorf("failed to seed development recipients: %w", err) + } + return nil + }) + if err != nil { + return errors.Errorf("failed to seed development recipients: %w", err) + } + // TODO remove the tmp data, not sure why but last i removed it, it + // broke the seeding of the company data + //tmpRecpMap = map[string][]*uuid.UUID{} + return nil +} + +// SeedDevelopmentRecipientGroups seeds recipient groups +func SeedDevelopmentRecipientGroups( + faker *gofakeit.Faker, + companyRepository *repository.Company, + recipientRepository *repository.Recipient, + recipientGroupRepository *repository.RecipientGroup, +) error { + // recipients holds the recipients we want to add to the recipient group + recipients := []*struct { + Email string + Model *model.Recipient + Group string + }{ + { + Email: TEST_RECIPIENT_EMAIL_1, + Model: nil, + Group: 
TEST_RECIPIENT_GROUP_NAME_1, + }, + { + Email: TEST_RECIPIENT_EMAIL_2, + Model: nil, + Group: TEST_RECIPIENT_GROUP_NAME_1, + }, + { + Email: TEST_RECIPIENT_EMAIL_3, + Model: nil, + Group: TEST_RECIPIENT_GROUP_NAME_2, + }, + { + Email: TEST_RECIPIENT_EMAIL_4, + Model: nil, + Group: TEST_RECIPIENT_GROUP_NAME_2, + }, + } + for _, recipient := range recipients { + email, err := vo.NewEmail(recipient.Email) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + recp, err := recipientRepository.GetByEmailAndCompanyID( + context.Background(), + email, + nil, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if recp == nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, "recipient not found") + } + recipient.Model = recp + + } + + recipientGroups := []struct { + Name string + }{ + { + Name: TEST_RECIPIENT_GROUP_NAME_1, + }, + { + Name: TEST_RECIPIENT_GROUP_NAME_2, + }, + } + for _, recipientGroup := range recipientGroups { + rgID := nullable.NewNullableWithValue(uuid.New()) + name := nullable.NewNullableWithValue(*vo.NewString127Must(recipientGroup.Name)) + rg := model.RecipientGroup{ + ID: rgID, + Name: name, + } + r, err := recipientGroupRepository.GetByNameAndCompanyID( + context.Background(), + name.MustGet().String(), + nil, + &repository.RecipientGroupOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if r != nil { + continue + } + id, err := recipientGroupRepository.Insert( + context.Background(), + &rg, + ) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + recpUUIDs := []*uuid.UUID{} + for _, recipient := range recipients { + if recipient.Group == recipientGroup.Name { + id := recipient.Model.ID.MustGet() + recpUUIDs = append(recpUUIDs, &id) + } + } + err = recipientGroupRepository.AddRecipients( + context.Background(), + id, + recpUUIDs, + ) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + // add random company recipients to random recipient groups + err := forEachDevelopmentCompany(companyRepository, func(company *model.Company) error { + companyID := company.ID.MustGet() + groupNameVO := vo.NewString127Must(faker.BuzzWord()) + groupName := nullable.NewNullableWithValue( + *groupNameVO, + ) + recpUUIDs := tmpRecpMap[companyID.String()] + // create the group + rg := model.RecipientGroup{ + Name: groupName, + CompanyID: company.ID, + } + r, err := recipientGroupRepository.GetByNameAndCompanyID( + context.Background(), + groupNameVO.String(), + &companyID, + &repository.RecipientGroupOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("query failed: %s", err) + } + if r != nil { + return nil + } + id, err := recipientGroupRepository.Insert( + context.Background(), + &rg, + ) + if err != nil { + return errors.Errorf("insert failed: %s", err) + } + err = recipientGroupRepository.AddRecipients( + context.Background(), + id, + recpUUIDs, + ) + if err != nil { + return errors.Errorf( + "failed to add recipients to group with (groupID: %s and recipients: %s): %w", + id, + recpUUIDs, + err, + ) + } + return nil + }) + if err != nil { + return errors.Errorf("failed to create company data: %s", err) + } + + return nil +} + +func createDevelopmentRecipients( + devRecps []DevelopmentRecipient, + recipientRepository *repository.Recipient, +) error { + for _, recipient := 
range devRecps { + firstName := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.FirstName), + ) + lastName := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.LastName), + ) + email := nullable.NewNullNullable[vo.Email]() + if recipient.Email != "" { + email.Set(*vo.NewEmailMust(recipient.Email)) + } + department := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.Department), + ) + position := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.Position), + ) + phone := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.Phone), + ) + extraIdentifier := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.ExtraIdentifier), + ) + city := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.City), + ) + country := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.Country), + ) + misc := nullable.NewNullableWithValue( + *vo.NewOptionalString127Must(recipient.Misc), + ) + companyID := nullable.NewNullNullable[uuid.UUID]() + if recipient.CompanyID != "" { + cid := uuid.MustParse(recipient.CompanyID) + companyID.Set(cid) + } + recipient := model.Recipient{ + FirstName: firstName, + LastName: lastName, + Email: email, + Department: department, + Phone: phone, + ExtraIdentifier: extraIdentifier, + Position: position, + City: city, + Country: country, + Misc: misc, + CompanyID: companyID, + } + emailVO := recipient.Email.MustGet() + var companyIDVO *uuid.UUID + if recipient.CompanyID.IsSpecified() && !recipient.CompanyID.IsNull() { + cid := recipient.CompanyID.MustGet() + companyIDVO = &cid + } + r, err := recipientRepository.GetByEmailAndCompanyID( + context.Background(), + &emailVO, + companyIDVO, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return err + } + if r != nil { + continue + } + id, err := recipientRepository.Insert( + context.Background(), + &recipient, + ) + if err != nil { + return errors.Errorf("failed to insert: %s", err) + } + // adding the recipient to the tmpRecpMap + if recipient.CompanyID.IsSpecified() && !recipient.CompanyID.IsNull() { + cid := recipient.CompanyID.MustGet() + cidStr := cid.String() + tmpRecpMap[cidStr] = append(tmpRecpMap[cidStr], id) + } + } + return nil +} diff --git a/backend/seed/role.go b/backend/seed/role.go new file mode 100644 index 0000000..aa24d2e --- /dev/null +++ b/backend/seed/role.go @@ -0,0 +1,48 @@ +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "gorm.io/gorm" +) + +// SeedRoles seeds roles +func SeedRoles(roleRepository *repository.Role) error { + roles := []struct { + Name string + }{ + { + Name: data.RoleSuperAdministrator, + }, + { + Name: data.RoleCompanyUser, + }, + } + for _, role := range roles { + id := uuid.New() + createRole := model.Role{ + ID: id, + Name: role.Name, + } + r, err := roleRepository.GetByName(context.Background(), role.Name) + // if error is not found, create event type + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if r != nil { + continue + } + _, err = roleRepository.Insert(context.TODO(), &createRole) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} 
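
The role seeder above, like the other seeders in this package, is written to be idempotent: it looks the record up by its unique name, skips it when it already exists, and only inserts otherwise, so running the seeders on every startup is safe. A minimal sketch of that look-up-then-insert pattern follows; the narrow roleInserter interface is hypothetical and only stands in for the subset of *repository.Role used here, it is not part of the repository package.

package seed

import (
	"context"

	"github.com/go-errors/errors"
	"github.com/google/uuid"
	"github.com/phishingclub/phishingclub/model"
	"gorm.io/gorm"
)

// roleInserter is a hypothetical subset of *repository.Role, shown only to
// illustrate the look-up-then-insert flow used by SeedRoles above.
type roleInserter interface {
	GetByName(ctx context.Context, name string) (*model.Role, error)
	Insert(ctx context.Context, role *model.Role) (*uuid.UUID, error)
}

// seedRoleOnce inserts the named role unless it already exists, so calling
// the seeder repeatedly is a no-op.
func seedRoleOnce(ctx context.Context, repo roleInserter, name string) error {
	existing, err := repo.GetByName(ctx, name)
	if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
		return err // a real lookup failure, not just "record not found"
	}
	if existing != nil {
		return nil // already seeded
	}
	_, err = repo.Insert(ctx, &model.Role{ID: uuid.New(), Name: name})
	return err
}
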
diff --git a/backend/seed/smtpConfiguration_dev.go b/backend/seed/smtpConfiguration_dev.go new file mode 100644 index 0000000..268834c --- /dev/null +++ b/backend/seed/smtpConfiguration_dev.go @@ -0,0 +1,58 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// SeedDevelopmentSMTPConfiguration seeds development SMTP configuration +func SeedDevelopmentSMTPConfiguration( + smtpConfigurationRepository *repository.SMTPConfiguration, +) error { + configurations := []model.SMTPConfiguration{ + { + Name: nullable.NewNullableWithValue(*vo.NewString127Must(TEST_SMTP_CONFIGURATION_NAME_1)), + Host: nullable.NewNullableWithValue(*vo.NewString255Must("mailer")), + Port: nullable.NewNullableWithValue(*vo.NewPortMust(1025)), + Username: nullable.NewNullableWithValue(*vo.NewOptionalString255Must("")), + Password: nullable.NewNullableWithValue(*vo.NewOptionalString255Must("")), + IgnoreCertErrors: nullable.NewNullableWithValue(true), + }, + } + for _, configuration := range configurations { + id := uuid.New() + name := configuration.Name.MustGet() + configuration.ID = nullable.NewNullableWithValue(id) + c, err := smtpConfigurationRepository.GetByNameAndCompanyID( + context.Background(), + &name, + nil, + &repository.SMTPConfigurationOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if c != nil { + continue + } + _, err = smtpConfigurationRepository.Insert( + context.Background(), + &configuration, + ) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/utils_dev.go b/backend/seed/utils_dev.go new file mode 100644 index 0000000..1584a51 --- /dev/null +++ b/backend/seed/utils_dev.go @@ -0,0 +1,39 @@ +//go:build dev + +package seed + +import ( + "context" + + "github.com/go-errors/errors" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" +) + +// forEachDevelopmentCompany runs a function for n iterations for each test company +func forEachDevelopmentCompany( + companyRepository *repository.Company, + f func(company *model.Company) error, +) error { + for _, company := range []string{ + TEST_COMPANY_NAME_1, + TEST_COMPANY_NAME_2, + TEST_COMPANY_NAME_3, + TEST_COMPANY_NAME_4, + TEST_COMPANY_NAME_5, + } { + company, err := companyRepository.GetByName( + context.Background(), + company, + ) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + err = f(company) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/seed/webhooks.go b/backend/seed/webhooks.go new file mode 100644 index 0000000..21987b1 --- /dev/null +++ b/backend/seed/webhooks.go @@ -0,0 +1,42 @@ +package seed + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// SeedDevelopmentWebhooks seeds webhooks +func 
SeedDevelopmentWebhooks( + webhooksRepository *repository.Webhook, +) error { + webhooks := []model.Webhook{ + { + Name: nullable.NewNullableWithValue(*vo.NewString127Must("Test Webhook")), + URL: nullable.NewNullableWithValue(*vo.NewString1024Must("http://api-test-server/webhook")), + Secret: nullable.NewNullableWithValue(*vo.NewOptionalString1024Must("WEBHOOK_TEST_KEY@1234")), + }, + } + for _, webhook := range webhooks { + name := webhook.Name.MustGet() + wh, err := webhooksRepository.GetByName(context.TODO(), &name) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + if wh != nil { + continue + } + _, err = webhooksRepository.Insert(context.TODO(), &webhook) + if err != nil { + return errors.Errorf("%w: %s", errs.ErrDBSeedFailure, err) + } + } + return nil +} diff --git a/backend/server/startupChannel.go b/backend/server/startupChannel.go new file mode 100644 index 0000000..71a3308 --- /dev/null +++ b/backend/server/startupChannel.go @@ -0,0 +1,22 @@ +package server + +// StartupMessage is the status of the server startup +type StartupMessage struct { + Success bool + Error error +} + +// NewStartupMessage creates a new StartupMessage +func NewStartupMessage( + success bool, + err error, +) StartupMessage { + return StartupMessage{ + Success: success, + Error: err, + } +} + +func NewStartupMessageChannel() chan StartupMessage { + return make(chan StartupMessage, 1) +} diff --git a/backend/service/allowDeny.go b/backend/service/allowDeny.go new file mode 100644 index 0000000..a4ab6b0 --- /dev/null +++ b/backend/service/allowDeny.go @@ -0,0 +1,305 @@ +package service + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" +) + +type AllowDeny struct { + Common + AllowDenyRepository *repository.AllowDeny + CampaignRepository *repository.Campaign +} + +func (s *AllowDeny) Create( + ctx context.Context, + session *model.Session, + allowDeny *model.AllowDeny, +) (*uuid.UUID, error) { + ae := NewAuditEvent("AllowDeny.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errors.New("unauthorized") + } + // validate ata + if err := allowDeny.Validate(); err != nil { + return nil, errs.Wrap(err) + } + // check uniqueness + var companyID *uuid.UUID + if cid, err := allowDeny.CompanyID.Get(); err == nil { + companyID = &cid + } + name := allowDeny.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + s.AllowDenyRepository.DB, + "allow_denies", + name.String(), + companyID, + nil, + ) + if err != nil { + s.Logger.Errorw("failed to check SMTP uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + s.Logger.Debugw("smtp configuration name is already used", "name", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // insert + id, err := s.AllowDenyRepository.Insert(ctx, allowDeny) + if err != nil { + s.Logger.Errorw("failed to insert allow deny", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + s.AuditLogAuthorized(ae) + + return id, nil +} + +func 
(s *AllowDeny) Update(
+	ctx context.Context,
+	session *model.Session,
+	id *uuid.UUID,
+	incoming *model.AllowDeny,
+) error {
+	ae := NewAuditEvent("AllowDeny.Update", session)
+	ae.Details["updateID"] = id.String()
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil {
+		s.LogAuthError(err)
+		return err
+	}
+	if !isAuthorized {
+		s.AuditLogNotAuthorized(ae)
+		return errors.New("unauthorized")
+	}
+	// get current
+	current, err := s.AllowDenyRepository.GetByID(ctx, id, &repository.AllowDenyOption{})
+	if err != nil {
+		s.Logger.Errorw("failed to get allow deny", "error", err)
+		return err
+	}
+	// update config - if a field is not set, it is not updated
+	if v, err := incoming.Name.Get(); err == nil {
+		// check uniqueness
+		var companyID *uuid.UUID
+		if cid, err := current.CompanyID.Get(); err == nil {
+			companyID = &cid
+		}
+
+		isOK, err := repository.CheckNameIsUnique(
+			ctx,
+			s.AllowDenyRepository.DB,
+			"allow_denies",
+			v.String(),
+			companyID,
+			id,
+		)
+		if err != nil {
+			s.Logger.Errorw("failed to check allow deny name uniqueness",
+				"error", err,
+			)
+			return err
+		}
+		if !isOK {
+			s.Logger.Debugw("allow deny name is not unique", "name", v.String())
+			return validate.WrapErrorWithField(errors.New("is not unique"), "name")
+		}
+		current.Name.Set(v)
+	}
+	if v, err := incoming.Cidrs.Get(); err == nil {
+		current.Cidrs.Set(v)
+	}
+	// allow can not be changed as it could mess up a campaign that
+	// uses multiple entries as all entries must be allow or deny.
+
+	// validate data
+	if err := current.Validate(); err != nil {
+		s.Logger.Errorw("failed to validate allow deny", "error", err)
+		return err
+	}
+	// update
+	err = s.AllowDenyRepository.Update(ctx, *id, current)
+	if err != nil {
+		s.Logger.Errorw("failed to update allow deny",
+			"id", id.String(),
+			"error", err,
+		)
+		return err
+	}
+	s.AuditLogAuthorized(ae)
+	return nil
+}
+
+// GetAll gets all allow deny lists
+func (s *AllowDeny) GetAll(
+	ctx context.Context,
+	session *model.Session,
+	companyID *uuid.UUID,
+	options *repository.AllowDenyOption,
+) (*model.Result[model.AllowDeny], error) {
+	ae := NewAuditEvent("AllowDeny.GetAll", session)
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil {
+		s.LogAuthError(err)
+		return &model.Result[model.AllowDeny]{}, errs.Wrap(err)
+	}
+	if !isAuthorized {
+		s.AuditLogNotAuthorized(ae)
+		return &model.Result[model.AllowDeny]{}, errors.New("unauthorized")
+	}
+	// get
+	allowDenies, err := s.AllowDenyRepository.GetAll(
+		ctx,
+		companyID,
+		options,
+	)
+	if err != nil {
+		s.Logger.Errorw("failed to get allow deny", "error", err)
+		return &model.Result[model.AllowDeny]{}, errs.Wrap(err)
+	}
+
+	// no audit logs for read
+	return allowDenies, nil
+}
+
+// GetByID gets an allow deny list by ID
+func (s *AllowDeny) GetByID(
+	ctx context.Context,
+	session *model.Session,
+	id *uuid.UUID,
+) (*model.AllowDeny, error) {
+	ae := NewAuditEvent("AllowDeny.GetByID", session)
+	ae.Details["id"] = id.String()
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil {
+		s.LogAuthError(err)
+		return nil, errs.Wrap(err)
+	}
+	if !isAuthorized {
+		s.AuditLogNotAuthorized(ae)
+		return nil, errors.New("unauthorized")
+	}
+	// get
+	allowDeny, err := s.AllowDenyRepository.GetByID(ctx, id, &repository.AllowDenyOption{})
+	if err != nil {
+		s.Logger.Errorw("failed to get allow deny", "error", err)
+		return nil, errs.Wrap(err)
+
} + // no audit log for read + return allowDeny, nil +} + +// GetByCompanyID gets an allow denies by ID +func (s *AllowDeny) GetByCompanyID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) (*model.Result[model.AllowDeny], error) { + ae := NewAuditEvent("AllowDeny.GetByCompanyID", session) + ae.Details["id"] = id.String() + results := model.NewEmptyResult[model.AllowDeny]() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + s.LogAuthError(err) + return results, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return results, errors.New("unauthorized") + } + // get + allowDenies, err := s.AllowDenyRepository.GetAllByCompanyID( + ctx, + id, + &repository.AllowDenyOption{}, + ) + if err != nil { + s.Logger.Errorw("failed to get allow denies", "error", err) + return nil, errs.Wrap(err) + } + // no audit log of read + return allowDenies, nil +} + +// DeleteByID deletes an allow deny list by ID +func (s *AllowDeny) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("AllowDeny.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + s.LogAuthError(err) + return err + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get campaigns afffected so we can remove any deny_page_id + affectedCampaigns, err := s.CampaignRepository.GetByAllowDenyID( + ctx, + id, + ) + if err != nil { + s.Logger.Errorw("failed to get campaigns afffected by removing allow deny list", + "allowDenyID", id.String(), + "error", err, + ) + return err + } + cids := []*uuid.UUID{} + for _, campaign := range affectedCampaigns { + cid := campaign.ID.MustGet() + cids = append(cids, &cid) + } + err = s.CampaignRepository.RemoveDenyPageByCampaignIDs( + ctx, + cids, + ) + if err != nil { + s.Logger.Errorw("failed to remove deny page from campaigns", "error", err) + return err + } + // remove allow deny list from campaigns using it + err = s.CampaignRepository.RemoveAllowDenyListsByID( + ctx, + id, + ) + if err != nil { + s.Logger.Errorw("failed to remove allow / deny lists from campaigns", "error", err) + return err + } + // delete + err = s.AllowDenyRepository.Delete(ctx, *id) + if err != nil { + s.Logger.Errorw("failed to delete allow deny", "error", err) + return err + } + s.AuditLogAuthorized(ae) + return nil +} diff --git a/backend/service/apiSender.go b/backend/service/apiSender.go new file mode 100644 index 0000000..702affb --- /dev/null +++ b/backend/service/apiSender.go @@ -0,0 +1,784 @@ +package service + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "html/template" + "io" + "net/http" + "strings" + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// APISender is a service for API sender +type APISender struct { + Common + TemplateService *Template + CampaignTemplateService *CampaignTemplate + APISenderRepository *repository.APISender +} + +// APISenderTestResponse is a response for testing API sender +type 
APISenderTestResponse struct { + APISender *model.APISender `json:"apiSender"` + Request map[string]interface{} `json:"request"` + Response map[string]interface{} `json:"response"` +} + +// Create creates a new API sender +func (a *APISender) Create( + ctx context.Context, + session *model.Session, + apiSender *model.APISender, +) (*uuid.UUID, error) { + ae := NewAuditEvent("ApiSender.Create", session) + + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // validate + if err := apiSender.Validate(); err != nil { + a.Logger.Errorw("failed to validate API sender", "error", err) + return nil, errs.Wrap(err) + } + var companyID *uuid.UUID + if cid, err := apiSender.CompanyID.Get(); err == nil { + companyID = &cid + } + // check uniqueness + name := apiSender.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + a.APISenderRepository.DB, + "api_senders", + name.String(), + companyID, + nil, + ) + if err != nil { + a.Logger.Errorw("failed to check API sender uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + a.Logger.Debugw("AP sender name is already used", "name", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // Insert the entity + id, err := a.APISenderRepository.Insert(ctx, apiSender) + if err != nil { + a.Logger.Errorw("failed to insert API sender", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + a.AuditLogAuthorized(ae) + return id, nil +} + +// GetAll gets all API senders with pagination +func (a *APISender) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + option repository.APISenderOption, +) (*model.Result[model.APISender], error) { + ae := NewAuditEvent("ApiSender", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get all API senders + result, err := a.APISenderRepository.GetAll( + ctx, + companyID, + &option, + ) + if err != nil { + a.Logger.Errorw("failed to get all API senders", "error", err) + return nil, errs.Wrap(err) + } + // no audit log of reading + return result, nil +} + +// GetAllOverview gets all API senders with limited data +func (a *APISender) GetAllOverview( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + option repository.APISenderOption, +) (*model.Result[model.APISender], error) { + ae := NewAuditEvent("ApiSender.GetAllOverview", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get all API senders + result, err := a.APISenderRepository.GetAllOverview( + ctx, + companyID, + &option, + ) + if err != nil { + a.Logger.Errorw("failed to get all API senders", "error", err) + return nil, errs.Wrap(err) + } + // no audit log of reading + return result, nil +} + +// GetByID gets 
a API sender by ID +func (a *APISender) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + option *repository.APISenderOption, +) (*model.APISender, error) { + ae := NewAuditEvent("ApiSender.GetByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get API sender by ID + ent, err := a.APISenderRepository.GetByID( + ctx, + id, + option, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, errs.Wrap(err) + } + if err != nil { + a.Logger.Errorw("failed to get API sender by ID", "error", err) + return nil, errs.Wrap(err) + } + // no audit log for reading + return ent, nil +} + +// GetByCompanyID gets a API senders by company ID +func (a *APISender) GetByCompanyID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + option *repository.APISenderOption, +) (*model.Result[model.APISender], error) { + ae := NewAuditEvent("ApiSender.GetByCompanyID", session) + ae.Details["companyID"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get API sender by ID + result, err := a.APISenderRepository.GetAllByCompanyID( + ctx, + id, + option, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, errs.Wrap(err) + } + if err != nil { + a.Logger.Error("failed to get API senders by company ID", zap.Error(err)) + return nil, errs.Wrap(err) + } + // no audit log of reading + return result, nil +} + +// Update updates a API sender +func (a *APISender) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.APISender, +) error { + ae := NewAuditEvent("ApiSender.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + var companyID *uuid.UUID + if incoming.CompanyID.IsSpecified() && !incoming.CompanyID.IsNull() { + cid := incoming.CompanyID.MustGet() + companyID = &cid + } + if incoming.Name.IsSpecified() && !incoming.Name.IsNull() { + // check uniqueness + name := incoming.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + a.APISenderRepository.DB, + "api_senders", + name.String(), + companyID, + id, + ) + if err != nil { + a.Logger.Errorw("failed to check API sender uniqueness", "error", err) + return err + } + if !isOK { + a.Logger.Debugw("AP sender name is not unique", "name", name.String()) + return validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + } + // update the api sender - if a field is present not not null update it + current, err := a.APISenderRepository.GetByID(ctx, id, &repository.APISenderOption{}) + if err != nil { + a.Logger.Errorw("failed to get API sender by ID", "error", err) + return err + } + if v, err := incoming.Name.Get(); err == nil { + current.Name.Set(v) + } + if v, err := 
incoming.APIKey.Get(); err == nil { + current.APIKey.Set(v) + } + if v, err := incoming.CustomField1.Get(); err == nil { + current.CustomField1.Set(v) + } + if v, err := incoming.CustomField2.Get(); err == nil { + current.CustomField2.Set(v) + } + if v, err := incoming.CustomField3.Get(); err == nil { + current.CustomField3.Set(v) + } + if v, err := incoming.CustomField4.Get(); err == nil { + current.CustomField4.Set(v) + } + if v, err := incoming.RequestMethod.Get(); err == nil { + current.RequestMethod.Set(v) + } + if v, err := incoming.RequestURL.Get(); err == nil { + current.RequestURL.Set(v) + } + if v, err := incoming.RequestHeaders.Get(); err == nil { + current.RequestHeaders.Set(v) + } + if v, err := incoming.RequestBody.Get(); err == nil { + current.RequestBody.Set(v) + } + if incoming.ExpectedResponseStatusCode.IsSpecified() { + if v, err := incoming.ExpectedResponseStatusCode.Get(); err == nil { + current.ExpectedResponseStatusCode.Set(v) + } else { + current.ExpectedResponseStatusCode.SetNull() + } + } + if v, err := incoming.ExpectedResponseHeaders.Get(); err == nil { + current.ExpectedResponseHeaders.Set(v) + } + if v, err := incoming.ExpectedResponseBody.Get(); err == nil { + current.ExpectedResponseBody.Set(v) + } + if err := current.Validate(); err != nil { + a.Logger.Errorw("failed to validate API sender", "error", err) + return err + } + err = a.APISenderRepository.UpdateByID(ctx, id, current) + if err != nil { + a.Logger.Errorw("failed to update API sender", "error", err) + return err + } + a.AuditLogAuthorized(ae) + return nil +} + +// DeleteByID deletes a API sender by ID +func (a *APISender) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("ApiSender.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // delete the relation from the campaign templates + err = a.CampaignTemplateService.removeAPISenderIDBySenderID( + ctx, + session, + id, + ) + if err != nil { + a.Logger.Errorw("failed to remove API sender relation from campaign templates", + "error", err, + ) + return err + } + // delete the entity + err = a.APISenderRepository.DeleteByID(ctx, id) + if err != nil { + a.Logger.Errorw("failed to delete API sender", "error", err) + return err + } + a.AuditLogAuthorized(ae) + + return nil +} + +// SendTest sends a test request to the API sender +// and returns the response +func (a *APISender) SendTest( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) (*APISenderTestResponse, error) { + ae := NewAuditEvent("ApiSender.SendTest", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.CampaignTemplateService.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + a.Logger.Debugw("sending test request to API sender", "id", id.String()) + // get the API sender + apiSender, err := a.APISenderRepository.GetByID(ctx, id, &repository.APISenderOption{}) + if err != nil { + a.Logger.Errorw("failed to get API sender by ID", "error", err) + return nil, 
errs.Wrap(err) + } + emailRaw := "bob@enterprise.test" + email := *vo.NewEmailMust(emailRaw) + cid := nullable.NewNullableWithValue(uuid.New()) + testEmail := &model.Email{ + Name: nullable.NewNullableWithValue( + *vo.NewString64Must("Test Email"), + ), + MailEnvelopeFrom: nullable.NewNullableWithValue( + *vo.NewMailEnvelopeFromMust(emailRaw), + ), + MailHeaderFrom: nullable.NewNullableWithValue( + *vo.NewEmailMust( + fmt.Sprintf("Bob <%s>", emailRaw), + ), + ), + MailHeaderSubject: nullable.NewNullableWithValue( + *vo.NewOptionalString255Must("Test Email Subject"), + ), + Content: nullable.NewNullableWithValue( + *vo.NewOptionalString1MBMust("Hi {{.FirstName}},\n\nThis is a test email.\n\nBest,\nBob"), + ), + AddTrackingPixel: nullable.NewNullableWithValue(false), + } + testCampaignRecipient := &model.CampaignRecipient{ + ID: cid, + Recipient: &model.Recipient{ + ID: cid, + Email: nullable.NewNullableWithValue( + email, + ), + Phone: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("+1234567890"), + ), + ExtraIdentifier: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("extra-test-identifier"), + ), + FirstName: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Bob"), + ), + LastName: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Test"), + ), + Position: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Lead API Tester"), + ), + Department: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Research and Development"), + ), + City: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Odin"), + ), + Country: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("Denmark"), + ), + Misc: nullable.NewNullableWithValue( + *vo.NewOptionalString127Must("This is a test recipient"), + ), + Company: &model.Company{ + Name: nullable.NewNullableWithValue( + *vo.NewString64Must("Ravn Enterprise."), + ), + }, + }, + } + url, headers, body, err := a.buildRequest( + apiSender, + "id", + "foo/bar", + "api-sender-test.test", + testCampaignRecipient, + testEmail, + ) + if err != nil { + a.Logger.Errorw("failed to build test request", "error", err) + return nil, errs.Wrap(err) + } + requestBody := body.String() + res, resBodyClose, err := a.sendRequest( + context.Background(), + apiSender, + headers, + url, + body, + ) + if err != nil { + a.Logger.Errorw("failed to send test request", "error", err) + return nil, errs.Wrap(err) + } + defer resBodyClose() + responseBody, err := io.ReadAll(res.Body) + if err != nil { + a.Logger.Errorw("failed to read response body", "error", err) + return nil, errs.Wrap(err) + } + resData := map[string]any{ + "code": res.StatusCode, + "status": res.Status, + "headers": res.Header, + "body": string(responseBody), + } + data := &APISenderTestResponse{ + APISender: apiSender, + Request: map[string]any{ + "url": url.String(), + "headers": headers, + "body": requestBody, + }, + Response: resData, + } + a.AuditLogAuthorized(ae) + return data, nil +} + +// Send is a service method that builds and sends a API Sender request +// it does not use auth and must not be used without consideration directly by a controller +func (a *APISender) Send( + ctx context.Context, + session *model.Session, + cTemplate *model.CampaignTemplate, + campaignRecipient *model.CampaignRecipient, + domain *model.Domain, + mailTmpl *template.Template, + email *model.Email, +) error { + // get sender details + apiSenderID, err := cTemplate.APISenderID.Get() + if err != nil { + a.Logger.Infow( + "failed to get API 
Sender relation from template. Template is incomplete",
+			"error", err,
+		)
+		return err
+	}
+	apiSender, err := a.GetByID(
+		ctx,
+		session,
+		&apiSenderID,
+		&repository.APISenderOption{},
+	)
+	if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
+		return fmt.Errorf("api sender did not load: %s", err)
+	}
+	if apiSender == nil {
+		return errors.New("api sender did not load")
+	}
+	domainName := domain.Name.MustGet()
+	urlIdentifier := cTemplate.URLIdentifier
+	if urlIdentifier == nil {
+		return errors.New("url identifier MUST be loaded in campaign template")
+	}
+	urlPath := cTemplate.URLPath.MustGet().String()
+	url, headers, body, err := a.buildRequest(
+		apiSender,
+		domainName.String(),
+		urlIdentifier.Name.MustGet(),
+		urlPath,
+		campaignRecipient,
+		email,
+	)
+	if err != nil {
+		a.Logger.Errorw("failed to build api sender request", "error", err)
+		return err
+	}
+	resp, respBodyClose, err := a.sendRequest(
+		context.Background(),
+		apiSender,
+		headers,
+		url,
+		body,
+	)
+	if err != nil {
+		a.Logger.Errorw("failed to build and send api sender request", "error", err)
+		return err
+	}
+	defer respBodyClose()
+	// check if response matches expectations
+	nullableExpectedResponseCode := apiSender.ExpectedResponseStatusCode
+	if nullableExpectedResponseCode.IsSpecified() && !nullableExpectedResponseCode.IsNull() {
+		expectedResponseStatusCode := nullableExpectedResponseCode.MustGet()
+		if resp.StatusCode != expectedResponseStatusCode {
+			a.Logger.Debugw("api sender got unexpected response status code",
+				"statusCode", resp.StatusCode,
+			)
+			return fmt.Errorf("unexpected response status code: %d", resp.StatusCode)
+		}
+	}
+	// check for expected headers
+	nullableExpectedHeaders := apiSender.ExpectedResponseHeaders
+	if nullableExpectedHeaders.IsSpecified() && !nullableExpectedHeaders.IsNull() {
+		expectedHeaders := nullableExpectedHeaders.MustGet()
+		for _, expectedHeader := range expectedHeaders.Headers {
+			header := resp.Header.Get(expectedHeader.Key)
+
+			if !strings.Contains(header, expectedHeader.Value) {
+				a.Logger.Debugw("api sender got unexpected response header",
+					"expectedKey", expectedHeader.Key,
+					"expectedValue", expectedHeader.Value,
+					"header", header,
+				)
+				return fmt.Errorf("unexpected response header: expected '%s' to contain '%s' but has '%s'", expectedHeader.Key, expectedHeader.Value, header)
+			}
+		}
+	}
+	nullableExpectedBody := apiSender.ExpectedResponseBody
+	if nullableExpectedBody.IsSpecified() && !nullableExpectedBody.IsNull() {
+		expectedBody := nullableExpectedBody.MustGet()
+		// check for expected body
+		resBody, err := io.ReadAll(resp.Body)
+		if err != nil {
+			a.Logger.Errorw("failed to read response body", "error", err)
+			return err
+		}
+		if !bytes.Contains(resBody, []byte(expectedBody.String())) {
+			a.Logger.Debugw("api sender got unexpected response body",
+				"expectedBody", expectedBody,
+				"body", string(resBody),
+			)
+			return fmt.Errorf(
+				"unexpected response body: expected '%s' to contain '%s'",
+				string(resBody),
+				expectedBody,
+			)
+		}
+	}
+	return nil
+}
+
+func (a *APISender) buildHeader(
+	apiSender *model.APISender,
+) ([]*model.HTTPHeader, error) {
+	// setup headers
+	apiReqHeaders := []*model.HTTPHeader{}
+	requestHeaders := apiSender.RequestHeaders
+	if requestHeaders.IsSpecified() && !requestHeaders.IsNull() {
+		for _, header := range requestHeaders.MustGet().Headers {
+			keyTemplate := template.New("key")
+			keyTemplate, err := keyTemplate.Parse(header.Key)
+			if err != nil {
+				return nil, fmt.Errorf("failed to parse header key: %s", err)
+			}
+			keyTemplate = keyTemplate.Funcs(TemplateFuncs())
+			var key bytes.Buffer
+			if err :=
keyTemplate.Execute(&key, nil); err != nil { + return nil, errs.Wrap(err) + } + valueTemplate := template.New("value") + valueTemplate, err = valueTemplate.Parse(header.Value) + if err != nil { + return nil, fmt.Errorf("failed to parse header value: %s", err) + } + var value bytes.Buffer + if err := valueTemplate.Execute(&value, nil); err != nil { + return nil, fmt.Errorf("failed to execute value template: %s", err) + } + apiReqHeaders = append( + apiReqHeaders, + &model.HTTPHeader{ + Key: key.String(), + Value: value.String(), + }, + ) + } + } + return apiReqHeaders, nil +} + +// sendRequest builds and sends the request to the API +// it returns the response, a function to close the response body, and an error +// the close method MUST be called to avoid leaking resources +func (a *APISender) sendRequest( + ctx context.Context, + apiSender *model.APISender, + apiRequestHeaders []*model.HTTPHeader, + apiRequestURL *apiRequestURL, + apiRequestBody *apiRequestBody, +) (*http.Response, func(), error) { + // prepare request + reqCtx, reqCancel := context.WithTimeout(ctx, 3*time.Second) + defer reqCancel() + if apiRequestBody == nil { + apiRequestBody = bytes.NewBuffer([]byte{}) + } + req, err := http.NewRequestWithContext( + reqCtx, + apiSender.RequestMethod.MustGet().String(), + apiRequestURL.String(), + apiRequestBody, + ) + if err != nil { + return nil, func() {}, errs.Wrap(err) + } + // TODO these headers should be enrished with template variables like {{.FirstName}} or etc + for _, header := range apiRequestHeaders { + req.Header.Set(header.Key, header.Value) + } + // send request + a.Logger.Debugw("sending request", "URL", apiRequestURL.String()) + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, func() {}, errs.Wrap(err) + } + // #nosec + return resp, func() { resp.Body.Close() }, nil +} + +type apiRequestURL = bytes.Buffer +type apiRequestBody = bytes.Buffer + +func (a *APISender) buildRequest( + apiSender *model.APISender, + domainName string, + urlKey string, + urlPath string, + campaignRecipient *model.CampaignRecipient, + email *model.Email, // todo is this superfluous? it should be in the campaign recipient? 
+) (*apiRequestURL, []*model.HTTPHeader, *apiRequestBody, error) { + // setup headers + apiReqHeaders, err := a.buildHeader(apiSender) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to build headers: %s", err) + } + // setup URL + requestURL := apiSender.RequestURL.MustGet() + urlTemplate := template.New("url") + urlTemplate = urlTemplate.Funcs(TemplateFuncs()) + urlTemplate, err = urlTemplate.Parse(requestURL.String()) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to parse url: %s", err) + } + t := a.TemplateService.CreateMail( + domainName, + urlKey, + urlPath, + campaignRecipient, + email, + apiSender, + ) + var apiURL bytes.Buffer + if err := urlTemplate.Execute(&apiURL, t); err != nil { + return nil, nil, nil, fmt.Errorf("failed to execute url template: %s", err) + } + // setup body + // first parse and execute the mail content + mailContentTemplate := template.New("mailContent") + mailContentTemplate = mailContentTemplate.Funcs(TemplateFuncs()) + content, err := email.Content.Get() + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to get email content: %s", err) + } + mailTemplate, err := mailContentTemplate.Parse(content.String()) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to parse body: %s", err) + } + var mailContent bytes.Buffer + if err := mailTemplate.Execute(&mailContent, t); err != nil { + return nil, nil, nil, fmt.Errorf("failed to execute mail template: %s", err) + } + // Properly encode for JSON + var buf bytes.Buffer + encoder := json.NewEncoder(&buf) + encoder.SetEscapeHTML(false) + if err := encoder.Encode(mailContent.String()); err != nil { + return nil, nil, nil, fmt.Errorf("failed to marshal mail content: %s", err) + } + // Remove the newline that Encode adds and the surrounding quotes + jsonStr := strings.TrimSpace(buf.String()) + + // Mark as safe HTML so template won't escape it + (*t)["Content"] = template.HTML(jsonStr[1 : len(jsonStr)-1]) + contentTemplate := template.New("content") + contentTemplate = contentTemplate.Funcs(TemplateFuncs()) + contentTemplate, err = contentTemplate.Parse(apiSender.RequestBody.MustGet().String()) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to parse body: %s", err) + } + var body bytes.Buffer + if err := contentTemplate.Execute(&body, t); err != nil { + return nil, nil, nil, fmt.Errorf("failed to execute body template: %s", err) + } + return &apiURL, apiReqHeaders, &body, nil +} diff --git a/backend/service/asset.go b/backend/service/asset.go new file mode 100644 index 0000000..3bf4b54 --- /dev/null +++ b/backend/service/asset.go @@ -0,0 +1,657 @@ +package service + +import ( + "context" + "fmt" + "path/filepath" + "strings" + + "github.com/go-errors/errors" + + securejoin "github.com/cyphar/filepath-securejoin" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// Asset is a Asset service +type Asset struct { + Common + RootFolder string + FileService *File + AssetRepository *repository.Asset + DomainRepository *repository.Domain +} + +// Create creates and stores a new assets +func (a *Asset) Create( + g *gin.Context, + session *model.Session, + assets []*model.Asset, +) ([]*uuid.UUID, error) { + ids := 
[]*uuid.UUID{}
+	ae := NewAuditEvent("Asset.Create", session)
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) {
+		a.LogAuthError(err)
+		return ids, errs.Wrap(err)
+	}
+	if !isAuthorized {
+		a.AuditLogNotAuthorized(ae)
+		return ids, errs.ErrAuthorizationFailed
+	}
+	// @TODO for now we allow duplicate names - should we?
+	// without duplicates it is easier to reason about assets
+	// with duplicates it is easier to import a collection of files etc.
+
+	// upload the files
+	contextFolder := ""
+	// ensure that all assets have the same context
+	// and map assets to files
+	// @TODO move out of here
+	differentContextError := fmt.Errorf(
+		"all assets must have the same context '%s'",
+		contextFolder,
+	)
+
+	files := []*FileUpload{}
+	for _, asset := range assets {
+		domainNameProvided := asset.DomainName.IsSpecified() && !asset.DomainName.IsNull()
+		// ensure context is the same across all files
+		if !domainNameProvided && (!asset.CompanyID.IsSpecified() || asset.CompanyID.IsNull()) {
+			contextFolder = data.ASSET_GLOBAL_FOLDER
+		} else {
+			// set the context folder
+			dn, err := asset.DomainName.Get()
+			if err != nil {
+				a.Logger.Debugw("failed to get domain name", "error", err)
+				return ids, errs.Wrap(err)
+			}
+			domainName := dn.String()
+			if contextFolder == "" {
+				contextFolder = domainName
+			} else if contextFolder != domainName {
+				a.Logger.Error(differentContextError)
+				return ids, differentContextError
+			}
+		}
+
+		// map assets to files
+		path := ""
+		pp, err := asset.Path.Get()
+		if err != nil {
+			a.Logger.Debugw("failed to get path", "error", err)
+			return ids, errs.Wrap(err)
+		}
+		if p := pp.String(); len(p) > 0 {
+			// ensure the path is safe to use
+
+			// check if the first char is a '/' and if it is, strip it
+			p = strings.TrimPrefix(p, "/")
+			if strings.Contains(p, "..") || strings.HasPrefix(p, "/") {
+				a.Logger.Warnw("insecure path", "path", p)
+				return ids, validate.WrapErrorWithField(
+					errs.NewValidationError(fmt.Errorf("invalid path: %s", p)),
+					"Path",
+				)
+			}
+			path = p
+		}
+		path, err = securejoin.SecureJoin(path, asset.File.Filename)
+		if err != nil {
+			a.Logger.Warnw("insecure path", "error", err)
+			return ids, validate.WrapErrorWithField(
+				errs.NewValidationError(err),
+				"Path",
+			)
+		}
+		// relative path is used in the DB
+		relativePath, err := vo.NewRelativeFilePath(path)
+		if err != nil {
+			a.Logger.Debugw("failed to make file path", "error", err)
+			return ids, validate.WrapErrorWithField(
+				errs.NewValidationError(err),
+				"Path",
+			)
+		}
+		// TODO a global asset can be attached to a global domain but
+		// a company domain can not have a global asset ( asset without company id )
+		// a company domain can not have a domain that belongs to another company
+		if asset.DomainID.IsSpecified() && !asset.DomainID.IsNull() {
+			assetDomainID := asset.DomainID.MustGet()
+			domain, err := a.DomainRepository.GetByID(
+				g,
+				&assetDomainID,
+				&repository.DomainOption{},
+			)
+			if err != nil {
+				a.Logger.Debugw("failed to get domain by asset", "error", err)
+				return ids, errs.Wrap(err)
+			}
+			// a company domain can not have a global asset ( asset without company id )
+			domainHasCompanyRelation := domain.CompanyID.IsSpecified() && !domain.CompanyID.IsNull()
+			assetHasCompanyRelation := asset.CompanyID.IsSpecified() && !asset.CompanyID.IsNull()
+			if !assetHasCompanyRelation && domainHasCompanyRelation {
+				a.Logger.Debug("company id is required for domain")
+				return ids,
errs.NewCustomError(errors.New("shared view (no asset company id) can not be attached to a domain with a company id")) + } + // company domain can not have a domain company that belongs to another company and is not global + if domainHasCompanyRelation && assetHasCompanyRelation { + if domain.CompanyID.MustGet().String() != asset.CompanyID.MustGet().String() { + a.Logger.Debug("domain company id is not the same as asset company id") + return ids, errs.NewCustomError(errors.New("domain company id is not the same as asset company id")) + } + } + } + + // this is a bit dirty, but I will do it anyway + // overwriting the path the client assigned with the context relative path including the file name + asset.Path = nullable.NewNullableWithValue(*relativePath) + // full path is used in the file system + pathWithRootAndDomainContext, err := securejoin.SecureJoin(a.RootFolder, contextFolder) + if err != nil { + a.Logger.Debugw("insecure path", "path", err) + return ids, fmt.Errorf("insecure path: %s", err) + } + pathWithRootAndDomainContext, err = securejoin.SecureJoin( + pathWithRootAndDomainContext, + path, + ) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return ids, fmt.Errorf("insecure path: %s", err) + } + a.Logger.Debugw("file path", + "relative", relativePath.String(), + "relativeWithRootPath", pathWithRootAndDomainContext, + ) + asset.Path = nullable.NewNullableWithValue(*relativePath) // path to file from within the context + + files = append(files, NewFileUpload(pathWithRootAndDomainContext, &asset.File)) + } + // upload files to the file system + _, err = a.FileService.Upload( + g, + files, + ) + if err != nil { + a.Logger.Debugw("failed to upload files", "error", err) + return ids, errs.Wrap(err) + } + idsStr := []string{} + // save uploaded files to the database + for _, asset := range assets { + id, err := a.AssetRepository.Insert( + g, + asset, + ) + if err != nil { + a.Logger.Debugw("failed to save asset", "error", err) + // TODO remove all previously uploaded files + // buut maybe not, it would be annoying if there is a multi user system + // and a user uploads a huge amount of files and one fails and does this + // repeatedly to burn the server + return ids, errs.Wrap(err) + } + ids = append(ids, id) + idsStr = append(idsStr, id.String()) + } + ae.Details["assetIDs"] = idsStr + a.AuditLogAuthorized(ae) + + return ids, nil +} + +// GetAll gets all assets +func (a *Asset) GetAll( + ctx context.Context, + session *model.Session, + domainID *uuid.UUID, + companyID *uuid.UUID, + queryArgs *vo.QueryArgs, +) (*model.Result[model.Asset], error) { + result := model.NewEmptyResult[model.Asset]() + ae := NewAuditEvent("Asset.GetAll", session) + if domainID != nil { + ae.Details["domainID"] = domainID.String() + } + if companyID != nil { + ae.Details["companyID"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // if there is no companyID or domainID then the scope is 'shared' + if companyID == nil && domainID == nil { + result, err = a.AssetRepository.GetAllByGlobalContext( + ctx, + queryArgs, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + a.Logger.Errorw("failed to get global asset", "error", err) + return nil, errs.Wrap(err) + } + } else { + if 
domainID == nil { + a.Logger.Errorw("domain id required", "error", errors.New("domainID is nil")) + return nil, fmt.Errorf("domain id is required") + } + result, err = a.AssetRepository.GetAllByDomainAndContext( + ctx, + domainID, + companyID, + queryArgs, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + a.Logger.Errorw("failed to get domain assets", "error", err) + return nil, errs.Wrap(err) + } + } + // no audit log for read + return result, nil +} + +// GetByID gets an asset by id +func (a *Asset) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) (*model.Asset, error) { + ae := NewAuditEvent("Asset.GetById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get the asset + asset, err := a.AssetRepository.GetByID( + ctx, + id, + ) + if err != nil { + a.Logger.Debugw("asset not found", + "id", id.String(), + "error", err, + ) + return nil, errs.Wrap(err) + } + // no audit on read + return asset, nil +} + +// GetByID gets an asset by path +func (a *Asset) GetByPath( + ctx context.Context, + session *model.Session, + path string, +) (*model.Asset, error) { + ae := NewAuditEvent("Asset.GetByPath", session) + ae.Details["path"] = path + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get the asset + asset, err := a.AssetRepository.GetByPath(ctx, path) + if err != nil { + a.Logger.Debugw("asset not found by path", + "path", path, + "error", err, + ) + return nil, errs.Wrap(err) + } + // no audit on read + return asset, nil +} + +// UpdateByID updates an asset by id +func (a *Asset) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + name nullable.Nullable[vo.OptionalString127], + description nullable.Nullable[vo.OptionalString255], +) error { + ae := NewAuditEvent("Asset.UpdateById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get the current + current, err := a.AssetRepository.GetByID( + ctx, + id, + ) + if err != nil { + a.Logger.Debugw("asset not found", "error", err) + return err + } + // update the asset + current.Name = name + current.Description = description + // validate + if err := current.Validate(); err != nil { + a.Logger.Debugw("failed to validate asset", "error", err) + return err + } + // save the change + err = a.AssetRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + a.Logger.Errorw("failed to update asset", "error", err) + return err + } + a.AuditLogAuthorized(ae) + return nil +} + +// DeleteByID deletes an asset by id +func (a *Asset) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Asset.DeleteById", session) + ae.Details["id"] = id.String() + // check 
permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get the asset + asset, err := a.AssetRepository.GetByID( + ctx, + id, + ) + if err != nil { + a.Logger.Debugw("asset not found", + "id", id.String(), + "error", err, + ) + return err + } + // delete the file + domainContext := data.ASSET_GLOBAL_FOLDER + if domainName, err := asset.DomainName.Get(); err == nil { + domainContext = domainName.String() + } + p, err := asset.Path.Get() + if err != nil { + a.Logger.Debugw("failed to get path", "error", err) + return err + } + + domainRoot, err := securejoin.SecureJoin(a.RootFolder, domainContext) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + filePath, err := securejoin.SecureJoin(domainRoot, p.String()) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + + err = a.FileService.Delete( + filePath, + ) + if err != nil { + a.Logger.Debugw("failed to delete file", + "path", filePath, + "error", err, + ) + return err + } + err = a.FileService.RemoveEmptyFolderRecursively( + domainRoot, + filepath.Dir(filePath), + ) + if err != nil { + a.Logger.Debugw("failed to remove empty folders", + "path", filePath, + "error", err, + ) + return err + } + // delete the asset from the database + err = a.AssetRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + a.Logger.Errorw("failed to delete asset from database but the file is deleted", + "path", filePath, + "error", err, + ) + return err + } + ae.Details["path"] = filePath + a.AuditLogAuthorized(ae) + return nil +} + +// DeleteAllByCompanyID deletes all assets by company ID +func (a *Asset) DeleteAllByCompanyID( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, +) error { + ae := NewAuditEvent("Asset.DeleteAllByCompanyID", session) + if companyID != nil { + ae.Details["companyID"] = companyID + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get assets + assets, err := a.AssetRepository.GetAllByCompanyID( + ctx, + companyID, + ) + if err != nil { + a.Logger.Debugw("asset not found", "error", err) + return err + } + for _, asset := range assets { + // delete the file + domainContext := data.ASSET_GLOBAL_FOLDER + if domainName, err := asset.DomainName.Get(); err == nil { + domainContext = domainName.String() + } + p, err := asset.Path.Get() + if err != nil { + a.Logger.Debugw("failed to get path", "error", err) + return err + } + domainRoot, err := securejoin.SecureJoin(a.RootFolder, domainContext) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + filePath, err := securejoin.SecureJoin(domainRoot, p.String()) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + err = a.FileService.Delete( + filePath, + ) + if err != nil { + a.Logger.Debugw("failed to delete file", + "path", filePath, + "error", err, + ) + return err + } + err = a.FileService.RemoveEmptyFolderRecursively( + domainRoot, + filepath.Dir(filePath), + ) + if err != nil { + a.Logger.Debugw("failed to remove empty folders", + "path", filePath, + "error", 
err, + ) + return err + } + // delete the asset from the database + assetID := asset.ID.MustGet() + err = a.AssetRepository.DeleteByID( + ctx, + &assetID, + ) + if err != nil { + a.Logger.Errorw("failed to delete asset from database but the file is deleted", + "path", filePath, + "error", err, + ) + return err + } + } + a.AuditLogAuthorized(ae) + return nil +} + +// DeleteAllByDomainID deletes all assets by domain ID +func (a *Asset) DeleteAllByDomainID( + ctx context.Context, + session *model.Session, + domainID *uuid.UUID, +) error { + ae := NewAuditEvent("Asset.DeleteAllByDomainID", session) + if domainID != nil { + ae.Details["domainId"] = domainID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get assets + assets, err := a.AssetRepository.GetAllByCompanyID( + ctx, + domainID, + ) + if err != nil { + a.Logger.Debugw("assets not found by domain ID", + "domainID", domainID.String(), + "error", err, + ) + return err + } + // delete + for _, asset := range assets { + + // delete the file + domainContext := data.ASSET_GLOBAL_FOLDER + if domainName, err := asset.DomainName.Get(); err == nil { + domainContext = domainName.String() + } + p, err := asset.Path.Get() + if err != nil { + a.Logger.Debugw("failed to get path", + "error", err, + ) + return err + } + + domainRoot, err := securejoin.SecureJoin(a.RootFolder, domainContext) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + filePath, err := securejoin.SecureJoin(domainRoot, p.String()) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + err = a.FileService.Delete( + filePath, + ) + if err != nil { + a.Logger.Debugw("failed to delete file", + "path", filePath, + "error", err, + ) + return err + } + err = a.FileService.RemoveEmptyFolderRecursively( + domainRoot, + filepath.Dir(filePath), + ) + if err != nil { + a.Logger.Debugw("failed to remove empty folders", + "path", filePath, + "error", err, + ) + return err + } + // delete the asset from the database + assetID := asset.ID.MustGet() + err = a.AssetRepository.DeleteByID( + ctx, + &assetID, + ) + if err != nil { + a.Logger.Errorw("failed to delete asset from database but the file is deleted", + "path", filePath, + "error", err, + ) + return err + } + } + a.AuditLogAuthorized(ae) + return nil +} diff --git a/backend/service/attachment.go b/backend/service/attachment.go new file mode 100644 index 0000000..e276c1d --- /dev/null +++ b/backend/service/attachment.go @@ -0,0 +1,418 @@ +package service + +import ( + "context" + "fmt" + "path/filepath" + + "github.com/go-errors/errors" + + securejoin "github.com/cyphar/filepath-securejoin" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// Attachment is an Attachment service +type Attachment struct { + Common + RootFolder string + FileService *File + AttachmentRepository *repository.Attachment + EmailRepository *repository.Email +} + +// Create creates and stores new attachments +func (a *Attachment) Create( + g *gin.Context, + session *model.Session, + attachments
[]*model.Attachment, +) ([]*uuid.UUID, error) { + ae := NewAuditEvent("Attachment.Create", session) + createdIDs := []*uuid.UUID{} + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return createdIDs, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return createdIDs, errs.ErrAuthorizationFailed + } + // @TODO for now we allow duplicate names - should we? + // without duplicates it is easier to reason about attachments + // with duplicates it is easier to import a collection of files + + // upload the files + contextFolder := "" + // ensure that all attachments have the same context + // and map attachments to files + // @TODO move out of here + differentContextError := fmt.Errorf( + "all attachments must have the same context '%s'", + contextFolder, + ) + files := []*FileUpload{} + filePaths := []string{} + for _, attachment := range attachments { + // ensure context is the same across all files + if attachment.CompanyID.IsSpecified() && attachment.CompanyID.IsNull() { + if contextFolder == "" { + contextFolder = data.ASSET_GLOBAL_FOLDER + } else if contextFolder != data.ASSET_GLOBAL_FOLDER { + a.Logger.Error(differentContextError) + return createdIDs, differentContextError + } + } else { + companyID, err := attachment.CompanyID.Get() + if err != nil { + a.Logger.Debugw("failed to get company id", "error", err) + return createdIDs, errs.Wrap(err) + } + if contextFolder == "" { + contextFolder = companyID.String() + } else if contextFolder != companyID.String() { + a.Logger.Error(differentContextError) + return createdIDs, differentContextError + } + } + // map attachments to files + attachmentFilename := filepath.Clean(attachment.File.Filename) + // relative path is used in the DB + relativePath, err := vo.NewRelativeFilePath(attachmentFilename) + if err != nil { + a.Logger.Debugw("failed to make file path", + "path", attachmentFilename, + "error", err, + ) + return createdIDs, errs.Wrap(err) + } + // full path is used in the file system + pathWithRootAndDomainContext, err := securejoin.SecureJoin( + a.RootFolder, + contextFolder, + ) + if err != nil { + a.Logger.Infow("insecure path", "error", err) + return createdIDs, errs.Wrap(err) + } + pathWithRootAndDomainContext, err = securejoin.SecureJoin( + pathWithRootAndDomainContext, + attachmentFilename, + ) + if err != nil { + a.Logger.Infow("insecure path", "error", err) + return createdIDs, errs.Wrap(err) + } + a.Logger.Debugw("file path", + "relative", relativePath.String(), + "relativeWithRootFilePath", pathWithRootAndDomainContext, + ) + filePaths = append(filePaths, pathWithRootAndDomainContext) + files = append(files, NewFileUpload(pathWithRootAndDomainContext, attachment.File)) + } + // upload files to the file system + _, err = a.FileService.Upload( + g, + files, + ) + if err != nil { + a.Logger.Debugw("failed to upload files", "error", err) + return createdIDs, errs.Wrap(err) + } + // save uploaded files to the database + for _, attachment := range attachments { + id, err := a.AttachmentRepository.Insert( + g, + attachment, + ) + if err != nil { + a.Logger.Debugw("failed to save attachment", "error", err) + // TODO remove all previously uploaded files + // but maybe not, it would be annoying if there is a multi user system + // and a user uploads a huge amount of files and one fails and does this + // repeatedly to burn the server + return createdIDs, errs.Wrap(err) + } + createdIDs = append(createdIDs, id) + } + 
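// collect the created ids as strings for the audit log entry +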
strIds := []string{} + for _, id := range createdIDs { + strIds = append(strIds, id.String()) + + } + ae.Details["paths"] = filePaths + ae.Details["ids"] = strIds + a.AuditLogAuthorized(ae) + + return createdIDs, nil +} + +// GetAll gets all attachments +func (a *Attachment) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + query *vo.QueryArgs, +) (*model.Result[model.Attachment], error) { + ae := NewAuditEvent("Attachment.GetAll", session) + result := model.NewEmptyResult[model.Attachment]() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // if there is no companyID then the scope is 'shared' + if companyID == nil { + result, err = a.AttachmentRepository.GetAllByGlobalContext( + ctx, + query, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + a.Logger.Errorw("failed to get global attachments", "error", err) + return nil, errs.Wrap(err) + } + } else { + result, err = a.AttachmentRepository.GetAllByContext( + ctx, + companyID, + query, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + a.Logger.Errorw("failed to get company attachments", "error", err) + return nil, errs.Wrap(err) + } + } + for _, attachment := range result.Rows { + path, err := a.GetPath(attachment) + if err != nil { + a.Logger.Debugw("failed to get path", "error", err) + return nil, errs.Wrap(err) + } + attachment.Path = nullable.NewNullableWithValue(*path) + } + // no audit on read + return result, nil +} + +// GetByID gets an attachment by id +func (a *Attachment) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) (*model.Attachment, error) { + ae := NewAuditEvent("Attachment.GetById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get the attachment + attachment, err := a.AttachmentRepository.GetByID( + ctx, + id, + ) + if err != nil { + a.Logger.Debugw("attachment not found", "error", err) + return nil, errs.Wrap(err) + } + // path + path, err := a.GetPath(attachment) + if err != nil { + a.Logger.Debugw("failed to get path", "error", err) + return nil, errs.Wrap(err) + } + attachment.Path = nullable.NewNullableWithValue(*path) + // no audit log on read + return attachment, nil +} + +// UpdateByID updates an attachment by id +func (a *Attachment) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + attachment *model.Attachment, +) error { + ae := NewAuditEvent("Attachment.UpdateById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get the attachment + current, err := a.AttachmentRepository.GetByID( + ctx, + id, + ) + if err != nil { + a.Logger.Debugw("attachment not found", "error", err) + return err + } + // update the attachment + 
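// only overwrite fields that the client explicitly specified +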
if attachment.Name.IsSpecified() { + current.Name = attachment.Name + } + if attachment.Description.IsSpecified() { + current.Description = attachment.Description + } + if attachment.EmbeddedContent.IsSpecified() { + current.EmbeddedContent = attachment.EmbeddedContent + } + // validate + if err := current.Validate(); err != nil { + a.Logger.Debugw("failed to validate attachment", "error", err) + return err + } + // save the change + err = a.AttachmentRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + a.Logger.Errorw("failed to update attachment", "error", err) + return err + } + a.AuditLogAuthorized(ae) + return nil +} + +// DeleteByID deletes an attachment by id +func (a *Attachment) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Attachment.DeleteById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + a.LogAuthError(err) + return err + } + if !isAuthorized { + a.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get the attachment + attachment, err := a.AttachmentRepository.GetByID( + ctx, + id, + ) + if err != nil { + a.Logger.Debugw("attachment not found", "error", err) + return err + } + // delete any references to the attachments in emails + err = a.EmailRepository.RemoveAttachmentsByAttachmentID( + ctx, + id, + ) + if err != nil { + a.Logger.Debugw("failed to delete attachment references", "error", err) + return err + } + // delete the file + companyContext := data.ASSET_GLOBAL_FOLDER + if attachment.CompanyID.IsSpecified() && !attachment.CompanyID.IsNull() { + companyContext = attachment.CompanyID.MustGet().String() + } + domainRoot, err := securejoin.SecureJoin(a.RootFolder, companyContext) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + attachmentFileName := attachment.FileName.MustGet() + filename, err := securejoin.SecureJoin(domainRoot, attachmentFileName.String()) + if err != nil { + a.Logger.Debugw("insecure path", "error", err) + return err + } + err = a.FileService.Delete( + filename, + ) + if err != nil { + a.Logger.Debugw("failed to delete attachment file", "error", err) + return err + } + // delete the attachment from the database + err = a.AttachmentRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + a.Logger.Errorw("failed to delete attachment from database but the file is deleted", + "error", err, + ) + return err + } + a.AuditLogAuthorized(ae) + return nil +} + +func (a *Attachment) GetPath(attachment *model.Attachment) (*vo.RelativeFilePath, error) { + // path + contextFolder := "" + if !attachment.CompanyID.IsSpecified() || attachment.CompanyID.IsNull() { + contextFolder = data.ASSET_GLOBAL_FOLDER + } else { + companyID := attachment.CompanyID.MustGet().String() + contextFolder = companyID + } + // map attachments to files + attachmentFilename := filepath.Clean(attachment.FileName.MustGet().String()) + // relative path is used in the DB + /* + relativePath, err := vo.NewRelativeFilePath(attachmentFilename) + if err != nil { + a.Logger.Debugw("failed to make file path", err) + return nil,errs.Wrap(err) + } + */ + // full path is used in the file system + pathWithRootAndDomainContext, err := securejoin.SecureJoin(a.RootFolder, contextFolder) + if err != nil { + a.Logger.Infow("insecure path", "error", err) + return nil, errs.Wrap(err) + } + 
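// the filename is joined in a second SecureJoin step so that neither the context folder nor the filename can escape the root folder +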
pathWithRootAndDomainContext, err = securejoin.SecureJoin( + pathWithRootAndDomainContext, + attachmentFilename, + ) + if err != nil { + a.Logger.Infow("insecure path", "error", err) + return nil, errs.Wrap(err) + } + path, err := vo.NewRelativeFilePath(pathWithRootAndDomainContext) + if err != nil { + a.Logger.Debugw("failed to make file path", "error", err) + return nil, errs.Wrap(err) + } + return path, nil +} diff --git a/backend/service/campaign.go b/backend/service/campaign.go new file mode 100644 index 0000000..d5cf5d5 --- /dev/null +++ b/backend/service/campaign.go @@ -0,0 +1,3171 @@ +package service + +import ( + "context" + "crypto/tls" + "errors" + "fmt" + "html/template" + "math/rand" + "os" + "path/filepath" + "slices" + "sort" + "strings" + "time" + + go_errors "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "github.com/wneessen/go-mail" + "gorm.io/gorm" +) + +// Campaign is the Campaign service +type Campaign struct { + Common + CampaignRepository *repository.Campaign + CampaignRecipientRepository *repository.CampaignRecipient + RecipientRepository *repository.Recipient + RecipientGroupRepository *repository.RecipientGroup + AllowDenyRepository *repository.AllowDeny + WebhookRepository *repository.Webhook + CampaignTemplateService *CampaignTemplate + TemplateService *Template + DomainService *Domain + RecipientService *Recipient + MailService *Email + APISenderService *APISender + SMTPConfigService *SMTPConfiguration + WebhookService *Webhook + AttachmentPath string +} + +// Create creates a new campaign +func (c *Campaign) Create( + ctx context.Context, + session *model.Session, + campaign *model.Campaign, +) (*uuid.UUID, error) { + ae := NewAuditEvent("Campaign.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + if len(campaign.RecipientGroupIDs) == 0 { + return nil, validate.WrapErrorWithField(errors.New("no groups provided"), "Recipient Groups") + } + // if the schedule type is scheduled, set the start time to start of day and end to the end of the last day + if campaign.ConstraintWeekDays.IsSpecified() && !campaign.ConstraintWeekDays.IsNull() { + if err := campaign.ValidateSendTimesSet(); err != nil { + return nil, errs.Wrap(err) + } + if err := c.updateSchedulesCampaignStartAndEndDates(campaign); err != nil { + return nil, errs.Wrap(err) + } + } + // validate + if err := campaign.Validate(); err != nil { + return nil, errs.Wrap(err) + } + // check the template is usable + templateID := campaign.TemplateID.MustGet() + cTemplate, err := c.CampaignTemplateService.GetByID( + ctx, + session, + &templateID, + &repository.CampaignTemplateOption{ + UsableOnly: true, + }, + ) + if err != nil { + return nil, errs.Wrap(err) + } + if cTemplate == nil { + return nil, errors.New("attempted to create 
campaign with unusable template") + } + // check uniqueness + var companyID *uuid.UUID + if cid, err := campaign.CompanyID.Get(); err == nil { + companyID = &cid + } + name := campaign.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + c.CampaignRepository.DB, + "campaigns", + name.String(), + companyID, + nil, + ) + if err != nil { + c.Logger.Errorw("failed to check campaign uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + c.Logger.Debugw("campaign name is already taken", "error", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // validate allow deny list selections + if err := c.validateAllowDenyIsSameTypeByIDs(ctx, campaign); err != nil { + return nil, errs.Wrap(err) + } + // check there is atleast one valid group + // and remove any empty groups + validGroups := []*uuid.UUID{} + for _, groupID := range campaign.RecipientGroupIDs.MustGet() { + count, err := c.RecipientGroupRepository.GetRecipientCount(ctx, groupID) + if err != nil { + return nil, errs.Wrap(err) + } + if count > 0 { + validGroups = append(validGroups, groupID) + } + } + if len(validGroups) == 0 { + return nil, errs.NewValidationError( + errors.New("Selected groups have no recipients"), + ) + } + campaign.RecipientGroupIDs.Set(validGroups) + // save + id, err := c.CampaignRepository.Insert(ctx, campaign) + if err != nil { + c.Logger.Errorw("failed to create campaign", "error", err) + return nil, errs.Wrap(err) + } + createdCampaign, err := c.CampaignRepository.GetByID( + ctx, + id, + &repository.CampaignOption{ + WithRecipientGroups: true, + }, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign by id", "error", err) + return nil, errs.Wrap(err) + } + err = c.schedule(ctx, session, createdCampaign) + if err != nil { + c.Logger.Errorw("failed to schedule campaign", "error", err) + // TODO we should delete the campaign as it was not scheduled + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + c.AuditLogAuthorized(ae) + return id, nil +} + +// schedule campaign schedules the campaign +// this is a service method that does not perform auth, use with consideration +func (c *Campaign) schedule( + ctx context.Context, + session *model.Session, + campaign *model.Campaign, +) error { + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.Logger.Errorw("failed to create campaign", "error", err) + return errs.Wrap(err) + } + if !isAuthorized { + return errs.ErrAuthorizationFailed + } + + // get all recipients and remove duplicates + recipients := []*model.Recipient{} + dubMap := map[string]bool{} + if campaign.RecipientGroupIDs.IsSpecified() && !campaign.RecipientGroupIDs.IsNull() { + for _, groupID := range campaign.RecipientGroupIDs.MustGet() { + group, err := c.RecipientGroupRepository.GetByID( + ctx, + groupID, + &repository.RecipientGroupOption{ + WithRecipients: true, + }, + ) + if err != nil { + c.Logger.Errorw("failed to get recipient group by id", "error", err) + return errs.Wrap(err) + } + recps := group.Recipients + if recps == nil { + c.Logger.Error("recipient group did not load recipients") + return errors.New("recipient group did not load recipients") + } + // collect all and remove duplicates + for _, recp := range recps { + id := recp.ID.MustGet().String() + if _, ok := dubMap[id]; ok { + continue + } + dubMap[id] = true + recipients = append(recipients, recp) + } + } + } + // handle 
self managed campaign + // if this is a self-managed campaign, we must not remove existing + // campaign-recipients when rescheduling as this would give them new IDs + // which would mean previous sent links will not work anymore. + // instead we must only add new recipients and remove the ones that are no longer in the recipient groups + if campaign.IsSelfManaged() { + if err := campaign.ValidateNoSendTimesSet(); err != nil { + return errs.Wrap(err) + } + // sort by email when self managed + sort.Slice(recipients, func(a, b int) bool { + if v, err := recipients[a].Email.Get(); err == nil { + if v2, err := recipients[b].Email.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + }) + campaignID := campaign.ID.MustGet() + // remove campaign-recipients that are not supplied in a re-schedule + recipientIDs := make([]*uuid.UUID, len(recipients)) + for i, recp := range recipients { + rid := recp.ID.MustGet() + recipientIDs[i] = &rid + } + // c.Logger.Debugw("keeping recpient IDs", recipientIDs) + err := c.CampaignRecipientRepository.DeleteRecipientsNotIn( + ctx, + &campaignID, + recipientIDs, + ) + if err != nil { + c.Logger.Errorw("failed to delete campaign recipients", "error", err) + return errs.Wrap(err) + } + // insert campaign-recipients that are not already in the schedule + campaignRecipients := make([]*model.CampaignRecipient, len(recipients)) + for i, recipient := range recipients { + // check if already exists + rid := recipient.ID.MustGet() + _, err := c.CampaignRecipientRepository.GetByCampaignAndRecipientID( + ctx, + &campaignID, + &rid, + &repository.CampaignRecipientOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("failed to get campaign recipient by campaign and recipient id", + "error", err, + ) + return errs.Wrap(err) + } + // if exists, skip it + if !errors.Is(err, gorm.ErrRecordNotFound) { + continue + } + recpID := nullable.NewNullableWithValue(recipient.ID.MustGet()) + campaignRecipients[i] = &model.CampaignRecipient{ + RecipientID: recpID, + CampaignID: nullable.NewNullableWithValue(campaignID), + SelfManaged: nullable.NewNullableWithValue(true), + } + // save campaign-recipient + _, err = c.CampaignRecipientRepository.Insert(ctx, campaignRecipients[i]) + if err != nil { + c.Logger.Errorw("failed to create campaign", "error", err) + return errs.Wrap(err) + } + } + err = c.setMostNotableCampaignEvent( + ctx, + campaign, + data.EVENT_CAMPAIGN_SELF_MANAGED, + ) + if err != nil { + // err is logged in method call + return errs.Wrap(err) + } + + return nil + } + if err := campaign.ValidateSendTimesSet(); err != nil { + return errs.Wrap(err) + } + // set schedule with a even spread between startAt and end time + startAt := campaign.SendStartAt.MustGet() + endAt := campaign.SendEndAt.MustGet() + recipientsCount := len(recipients) + // no recipients, no schedule + if recipientsCount == 0 { + return fmt.Errorf("no recipients to schedule") + } + campaignRecipients := make([]*model.CampaignRecipient, recipientsCount) + // sort the recipients by the selected sort field and order + sortOrder := campaign.SortOrder.MustGet().String() + sortField := campaign.SortField.MustGet().String() + recipients = sortRecipients(recipients, sortOrder, sortField) + // schedule the emails + if recipientsCount == 0 { + return fmt.Errorf("no recipients to schedule for '%s'", campaign.Name.MustGet()) + } + scheduledEvent := cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SCHEDULED] + // handle single 
recipient + if recipientsCount == 1 { + recpID := nullable.NewNullableWithValue(recipients[0].ID.MustGet()) + campaignID := nullable.NewNullableWithValue(campaign.ID.MustGet()) + campaignRecipient := &model.CampaignRecipient{ + RecipientID: recpID, + CampaignID: campaignID, + SendAt: nullable.NewNullableWithValue(startAt), + NotableEventID: nullable.NewNullableWithValue(*scheduledEvent), + } + _, err := c.CampaignRecipientRepository.Insert(ctx, campaignRecipient) + if err != nil { + c.Logger.Errorw("failed to create campaign", "error", err) + return errs.Wrap(err) + } + err = c.setMostNotableCampaignEvent( + ctx, + campaign, + data.EVENT_CAMPAIGN_SCHEDULED, + ) + if err != nil { + // err is logged in method call + return errs.Wrap(err) + } + return nil + } + // handle delivery schedule with specific week days + // it is when there a start and end date, specific weekdays and times ranges to send in. + if campaign.ConstraintWeekDays.IsSpecified() && !campaign.ConstraintWeekDays.IsNull() { + c.Logger.Debugw("schedule campaign with contraints", + "campaignName", campaign.Name.MustGet(), + ) + campaignDuration := time.Duration(0) + // first we calculate the diff, so we know the offset for we need to push between each in range moment + currentDate := startAt + toFind := campaign.ConstraintWeekDays.MustGet().AsSlice() + // iterate over each day in the period + for currentDate.Before(endAt) || currentDate.Equal(endAt) { + // check if the current day is in the week days + if slices.Contains(toFind, int(currentDate.Weekday())) { + // calculate the number of minutes the campaigns spans on this week day + dayStartTime := campaign.ConstraintStartTime.MustGet() + dayEndTime := campaign.ConstraintEndTime.MustGet() + diff := dayStartTime.DiffMinutes(dayEndTime) + campaignDuration += diff + + } + currentDate = currentDate.AddDate(0, 0, 1) + } + // interval is the minutes between each recipient schedule + //interval := int(campaignDuration.Minutes()) / (recipientsCount - 1) // -1 as the first send it placed at the start time + interval := time.Duration(campaignDuration.Nanoseconds() / int64(recipientsCount-1)) + dayStartTime := campaign.ConstraintStartTime.MustGet() + dayEndTime := campaign.ConstraintEndTime.MustGet() + // schedule each recipient + // iterate through the time again and progess on each interval until all recipients are set + currentDate = startAt + c.Logger.Debugw("campaign interval", "interval", interval) + for currentDate.Before(endAt) || currentDate.Equal(endAt) { + c.Logger.Debugw("schedule check date", "currentDate", currentDate) + // check if the current day is in the week days + if slices.Contains(toFind, int(currentDate.Weekday())) { + c.Logger.Debugw("scheduling date", "currentDate", currentDate) + // iterate over the hours in the day and jump each interval + // if over the end time, break and skip to next day, saving the surplus of interval minutes + // to be added to next send + currentDayStart := currentDate.Truncate(24 * time.Hour).Add(dayStartTime.Minutes()) + currentDayEnd := currentDate.Truncate(24 * time.Hour).Add(dayEndTime.Minutes()) + for currentDayStart.Before(currentDayEnd) || currentDayStart.Equal(currentDayEnd) { + c.Logger.Debugw("scheduling date at", "currentDayStart", currentDayStart) + // check if we have any recipients left + if len(recipients) == 0 { + break + } + // get the next recipient + recipient := recipients[0] + recipients = recipients[1:] + // save + campaignRecipient := &model.CampaignRecipient{ + RecipientID: recipient.ID, + CampaignID: campaign.ID, + 
SendAt: nullable.NewNullableWithValue(currentDayStart), + NotableEventID: nullable.NewNullableWithValue(*scheduledEvent), + } + _, err := c.CampaignRecipientRepository.Insert(ctx, campaignRecipient) + if err != nil { + c.Logger.Errorw("failed to create campaign", "error", err) + return errs.Wrap(err) + } + // check if we are over the end time + currentDayStart = currentDayStart.Add(interval * time.Duration(1)) + + } + } + // check the next day within the start and end date range + currentDate = currentDate.AddDate(0, 0, 1) + } + err = c.setMostNotableCampaignEvent( + ctx, + campaign, + data.EVENT_CAMPAIGN_SCHEDULED, + ) + if err != nil { + // err is logged in method call + return errs.Wrap(err) + } + + return nil + } + + // handle basic delivery schedule + // it is when there is no constraints, equal distribution between start and end datetime + campaignDuration := endAt.Sub(startAt) + // Calculate interval between emails + // TODO make this work in minutes + interval := time.Duration(campaignDuration.Nanoseconds() / int64(recipientsCount-1)) + for i, recipient := range recipients { + sentAt := startAt + if i > 0 { + sa := campaignRecipients[i-1].SendAt.MustGet().Add(interval * time.Duration(1)) + sentAt = sa + } + // todo perhaps this array is unnecesssary + //recpID := recipient.ID.MustGet() + //campaignID := campaign.ID.MustGet() + campaignRecipients[i] = &model.CampaignRecipient{ + RecipientID: recipient.ID, + CampaignID: campaign.ID, + SendAt: nullable.NewNullableWithValue(sentAt), + NotableEventID: nullable.NewNullableWithValue(*scheduledEvent), + } + // save + _, err = c.CampaignRecipientRepository.Insert(ctx, campaignRecipients[i]) + if err != nil { + c.Logger.Errorw("failed to create campaign", "error", err) + return errs.Wrap(err) + } + } + err = c.setMostNotableCampaignEvent( + ctx, + campaign, + data.EVENT_CAMPAIGN_SCHEDULED, + ) + if err != nil { + // err is logged in method call + return errs.Wrap(err) + } + return nil +} + +// sortRecipients sorts the recipients by the selected sort field and order +func sortRecipients(recipients []*model.Recipient, sortOrder, sortField string) []*model.Recipient { + switch sortOrder { + case "random": + sort.Slice(recipients, func(i, j int) bool { + // return a random bool + // #nosec + return rand.Float32() < 0.5 + }) + case "desc": + sort.Slice(recipients, func(a, b int) bool { + // TODO implements the rest of the fields + switch sortField { + case "email": + if v, err := recipients[a].Email.Get(); err == nil { + if v2, err := recipients[b].Email.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "first_name": + if v, err := recipients[a].FirstName.Get(); err == nil { + if v2, err := recipients[b].FirstName.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "last_name": + if v, err := recipients[a].LastName.Get(); err == nil { + if v2, err := recipients[b].LastName.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "phone": + if v, err := recipients[a].Phone.Get(); err == nil { + if v2, err := recipients[b].Phone.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "position": + if v, err := recipients[a].Position.Get(); err == nil { + if v2, err := recipients[b].Position.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return 
false + case "department": + if v, err := recipients[a].Department.Get(); err == nil { + if v2, err := recipients[b].Department.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "city": + if v, err := recipients[a].City.Get(); err == nil { + if v2, err := recipients[b].City.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "country": + if v, err := recipients[a].Country.Get(); err == nil { + if v2, err := recipients[b].Country.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "misc": + if v, err := recipients[a].Misc.Get(); err == nil { + if v2, err := recipients[b].Misc.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + case "extraID": + if v, err := recipients[a].ExtraIdentifier.Get(); err == nil { + if v2, err := recipients[b].ExtraIdentifier.Get(); err == nil { + return strings.ToLower(v.String()) > strings.ToLower(v2.String()) + } + } + return false + default: + panic("unknown sort field") + } + }) + case "asc": + sort.Slice(recipients, func(a, b int) bool { + switch sortField { + case "email": + if v, err := recipients[a].Email.Get(); err == nil { + if v2, err := recipients[b].Email.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "firstName": + if v, err := recipients[a].FirstName.Get(); err == nil { + if v2, err := recipients[b].FirstName.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "lastName": + if v, err := recipients[a].LastName.Get(); err == nil { + if v2, err := recipients[b].LastName.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "phone": + if v, err := recipients[a].Phone.Get(); err == nil { + if v2, err := recipients[b].Phone.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "position": + if v, err := recipients[a].Position.Get(); err == nil { + if v2, err := recipients[b].Position.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "department": + if v, err := recipients[a].Department.Get(); err == nil { + if v2, err := recipients[b].Department.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "city": + if v, err := recipients[a].City.Get(); err == nil { + if v2, err := recipients[b].City.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "country": + if v, err := recipients[a].Country.Get(); err == nil { + if v2, err := recipients[b].Country.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "misc": + if v, err := recipients[a].Misc.Get(); err == nil { + if v2, err := recipients[b].Misc.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + case "extraID": + if v, err := recipients[a].ExtraIdentifier.Get(); err == nil { + if v2, err := recipients[b].ExtraIdentifier.Get(); err == nil { + return strings.ToLower(v.String()) < strings.ToLower(v2.String()) + } + } + return false + default: + panic("unknown sort field") + 
} + }) + } + return recipients +} + +// GetByID gets a campaign by its id +func (c *Campaign) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.CampaignOption, +) (*model.Campaign, error) { + ae := NewAuditEvent("Campaign.GetById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + campaign, err := c.CampaignRepository.GetByID(ctx, id, options) + if err != nil { + c.Logger.Errorw("failed to get campaign by id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return campaign, nil +} + +// GetByName gets a campaign by its name +func (c *Campaign) GetByName( + ctx context.Context, + session *model.Session, + name string, + companyID *uuid.UUID, + options *repository.CampaignOption, +) (*model.Campaign, error) { + ae := NewAuditEvent("Campaign.GetByName", session) + ae.Details["name"] = name + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + campaign, err := c.CampaignRepository.GetByNameAndCompanyID(ctx, name, companyID, options) + if err != nil { + c.Logger.Errorw("failed to get campaign by name", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return campaign, nil +} + +// GetByCompanyID gets a campaigns by it company id +func (c *Campaign) GetByCompanyID( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + ae := NewAuditEvent("Campaign.GetByCompanyID", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = c.CampaignRepository.GetAllByCompanyID(ctx, companyID, options) + if err != nil { + c.Logger.Errorw("failed to get campaigns by company id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetStats gets stats for a campaign +// if no company id is given it retrieves stats for global including all companies +func (c *Campaign) GetStats( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, +) (*model.CampaignsStatView, error) { + ae := NewAuditEvent("Campaign.GetStats", session) + if companyID != nil { + ae.Details["companyID"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get stats + active, err := c.CampaignRepository.GetActiveCount(ctx, companyID) + if 
err != nil { + return nil, errs.Wrap(err) + } + upcoming, err := c.CampaignRepository.GetUpcomingCount(ctx, companyID) + if err != nil { + return nil, errs.Wrap(err) + } + finished, err := c.CampaignRepository.GetFinishedCount(ctx, companyID) + if err != nil { + return nil, errs.Wrap(err) + } + // no audit on read + return &model.CampaignsStatView{ + Active: active, + Upcoming: upcoming, + Finished: finished, + }, nil +} + +// GetResultStats gets results stats for a campaign +func (c *Campaign) GetResultStats( + ctx context.Context, + session *model.Session, + campaignID *uuid.UUID, +) (*model.CampaignResultView, error) { + ae := NewAuditEvent("Campaign.GetResultStats", session) + ae.Details["campaignId"] = campaignID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get stats + stats, err := c.CampaignRepository.GetResultStats(ctx, campaignID) + if err != nil { + c.Logger.Errorw("failed to get campaign results statistics", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return stats, nil +} + +// GetRecipientsByCampaignID gets all recipients for a campaign +func (c *Campaign) GetRecipientsByCampaignID( + ctx context.Context, + session *model.Session, + campaignID *uuid.UUID, + options *repository.CampaignRecipientOption, +) ([]*model.CampaignRecipient, error) { + ae := NewAuditEvent("Campaign.GetRecipientsByCampaignID", session) + ae.Details["campaignId"] = campaignID.String() + recipients := []*model.CampaignRecipient{} + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return recipients, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get all recipients + if options.OrderBy == "" { + options.OrderBy = "campaign_recipients.sent_at" + } + recipients, err = c.CampaignRecipientRepository.GetByCampaignID( + ctx, + campaignID, + options, + ) + if err != nil { + c.Logger.Errorw("failed to get recipients by campaign id", "error", err) + return recipients, errs.Wrap(err) + } + // no audit on read + return recipients, nil +} + +// GetEventsByCampaignID gets all events for a campaign +func (c *Campaign) GetEventsByCampaignID( + ctx context.Context, + session *model.Session, + campaignID *uuid.UUID, + queryArgs *vo.QueryArgs, + since *time.Time, + eventTypeIDs []string, +) (*model.Result[model.CampaignEvent], error) { + result := model.NewEmptyResult[model.CampaignEvent]() + ae := NewAuditEvent("Campaign.GetEventsByCampaignID", session) + ae.Details["campaignId"] = campaignID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = c.CampaignRepository.GetEventsByCampaignID( + ctx, + campaignID, + &repository.CampaignEventOption{ + QueryArgs: queryArgs, + WithUser: true, + EventTypeIDs: eventTypeIDs, + }, + since, + ) + if err != nil { + c.Logger.Errorw("failed to get events by campaign id", "error", err) + 
return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetAll gets all campaigns using pagination +func (c *Campaign) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + ae := NewAuditEvent("Campaign.GetAll", session) + if companyID != nil { + ae.Details["companyID"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = c.CampaignRepository.GetAll( + ctx, + companyID, + options, + ) + if err != nil { + c.Logger.Errorw("failed to get all campaigns", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetAllWithinDates gets all campaigns active, scheduled or self managed within dates +func (c *Campaign) GetAllWithinDates( + ctx context.Context, + session *model.Session, + startDate time.Time, + endDate time.Time, + companyID *uuid.UUID, + options *repository.CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + ae := NewAuditEvent("Campaign.GetAllWithinDates", session) + if companyID != nil { + ae.Details["companyID"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = c.CampaignRepository.GetAllCampaignWithinDates( + ctx, + companyID, + startDate, + endDate, + options, + ) + if err != nil { + c.Logger.Errorw("failed to get all campaigns between dates", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetAllActive gets all active campaigns using pagination +func (c *Campaign) GetAllActive( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + ae := NewAuditEvent("Campaign.GetAllActive", session) + if companyID != nil { + ae.Details["companyID"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = c.CampaignRepository.GetAllActive( + ctx, + companyID, + options, + ) + if err != nil { + c.Logger.Errorw("failed to get all active campaigns", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetAllUpcoming gets all upcoming campaigns using pagination +func (c *Campaign) GetAllUpcoming( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + ae := 
NewAuditEvent("Campaign.GetAllUpcoming", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = c.CampaignRepository.GetAllUpcoming( + ctx, + companyID, + options, + ) + if err != nil { + c.Logger.Errorw("failed to get all upcoming campaigns", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetAllFinished gets all finished campaigns using pagination +func (c *Campaign) GetAllFinished( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.CampaignOption, +) (*model.Result[model.Campaign], error) { + result := model.NewEmptyResult[model.Campaign]() + ae := NewAuditEvent("Campaign.GetAllFinished", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = c.CampaignRepository.GetAllFinished( + ctx, + companyID, + options, + ) + if err != nil { + c.Logger.Errorw("failed to get all finished campaigns", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// SaveTrackingPixelLoaded saves the tracking pixel event for a campaign recipient +// no permissions to check - this endpoint is public +// only a campaign recipient id is required +func (c *Campaign) SaveTrackingPixelLoaded( + ctx *gin.Context, + campaignRecipientID *uuid.UUID, +) error { + // get the campaign campaignRecipient + campaignRecipient, err := c.CampaignRecipientRepository.GetByCampaignRecipientID( + ctx.Request.Context(), + campaignRecipientID, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign recipient by id", "error", err) + return errs.Wrap(err) + } + recipientID := campaignRecipient.RecipientID.MustGet() + campaignID := campaignRecipient.CampaignID.MustGet() + + campaign, err := c.CampaignRepository.GetByID( + ctx, + &campaignID, + &repository.CampaignOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign by id", "error", err) + return errs.Wrap(err) + } + trackingPixelLoadedEventID := cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ] + newEventID := uuid.New() + var campaignEvent *model.CampaignEvent + if campaign.IsAnonymous.MustGet() { + userAgent := vo.NewEmptyOptionalString255() + campaignEvent = &model.CampaignEvent{ + ID: &newEventID, + CampaignID: &campaignID, + RecipientID: nil, + IP: vo.NewEmptyOptionalString64(), + UserAgent: userAgent, + EventID: trackingPixelLoadedEventID, + Data: vo.NewOptionalString1MBMust(""), + } + } else { + ip := vo.NewOptionalString64Must(ctx.ClientIP()) + ua := ctx.Request.UserAgent() + if len(ua) > 255 { + ua = strings.TrimSpace(ua[:255]) + } + userAgent := vo.NewOptionalString255Must(ua) + campaignEvent = &model.CampaignEvent{ + ID: &newEventID, + CampaignID: &campaignID, + RecipientID: &recipientID, + IP: ip, + UserAgent: userAgent, + EventID: 
cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ], + Data: vo.NewOptionalString1MBMust(""), + } + } + err = c.CampaignRepository.SaveEvent(ctx, campaignEvent) + if err != nil { + c.Logger.Errorw("failed to save tracking pixel loaded event", "error", err) + return errs.Wrap(err) + } + // handle most notable event + err = c.SetNotableCampaignRecipientEvent( + ctx, + campaignRecipient, + data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ, + ) + if err != nil { + // logging was done in the previous call + return errs.Wrap(err) + } + // handle webhook + webhookID, err := c.CampaignRepository.GetWebhookIDByCampaignID(ctx, &campaignID) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("failed to get webhook id by campaign id", "error", err) + return errs.Wrap(err) + } + if errors.Is(err, gorm.ErrRecordNotFound) || webhookID == nil { + return nil + } + err = c.HandleWebhook( + ctx, + webhookID, + &campaignID, + &recipientID, + data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_READ, + ) + if err != nil { + return errs.Wrap(err) + } + return nil +} + +// UpdateByID updates a campaign by id +func (c *Campaign) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.Campaign, +) error { + ae := NewAuditEvent("Campaign.UpdateById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + if len(incoming.RecipientGroupIDs) == 0 { + return validate.WrapErrorWithField(errors.New("no groups provided"), "Recipient Groups") + } + // if the schedule type is scheduled, set the start time to start of day and end to the end of the last day + if incoming.ConstraintWeekDays.IsSpecified() && !incoming.ConstraintWeekDays.IsNull() { + if err := incoming.ValidateScheduledTimes(); err != nil { + return errs.Wrap(err) + } + if err := c.updateSchedulesCampaignStartAndEndDates(incoming); err != nil { + return errs.Wrap(err) + } + } + // validate allow deny list selections + if err := c.validateAllowDenyIsSameTypeByIDs(ctx, incoming); err != nil { + return errs.Wrap(err) + } + // get campaign and change the values + current, err := c.CampaignRepository.GetByID( + ctx, + id, + &repository.CampaignOption{ + WithRecipientGroups: true, + }, + ) + if err != nil { + c.Logger.Errorw("failed to campaign by id", "error", err) + return errs.Wrap(err) + } + // check if the campaign is within the allowed time frame for editing + // we allow editing up until 5 minutes before the campaigns start time + if sendStartAt, err := current.SendStartAt.Get(); err == nil { + nowPlus5 := time.Now().Add(5 * time.Minute) + // c.Logger.Debugw("now (+5min): %s campaign: %s", nowPlus5.String(), sendStartAt.String()) + if nowPlus5.After(sendStartAt) { + c.Logger.Debugw( + "campaign too close to start to edit", + "campaignID", current.ID.MustGet().String(), + "nowPlus5", nowPlus5, + "sendStartAt", sendStartAt, + ) + return validate.WrapErrorWithField( + errors.New("Campaign already started or too close to start time"), + "Not allowed", + ) + } + } + + // update the values + if v, err := incoming.Name.Get(); err == nil { + // check uniqueness + var companyID *uuid.UUID + if cid, err := incoming.CompanyID.Get(); err == nil { + companyID = &cid + } + name := incoming.Name.MustGet() + isOK, err := 
repository.CheckNameIsUnique( + ctx, + c.CampaignRepository.DB, + "campaigns", + name.String(), + companyID, + id, + ) + if err != nil { + c.Logger.Errorw("failed to check campaign uniqueness", "error", err) + return errs.Wrap(err) + } + if !isOK { + c.Logger.Debugw("campaign name not unique", "name", name.String()) + return validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + + current.Name.Set(v) + } + // update values + if v, err := incoming.SaveSubmittedData.Get(); err == nil { + current.SaveSubmittedData.Set(v) + } + if v, err := incoming.IsAnonymous.Get(); err == nil { + current.IsAnonymous.Set(v) + } + if v, err := incoming.SortField.Get(); err == nil { + current.SortField.Set(v) + } + if v, err := incoming.SortOrder.Get(); err == nil { + current.SortOrder.Set(v) + } + if v, err := incoming.SendStartAt.Get(); err == nil { + current.SendStartAt.Set(v.Truncate(time.Minute)) + } + if v, err := incoming.SendEndAt.Get(); err == nil { + current.SendEndAt.Set(v.Truncate(time.Minute)) + } + if v, err := incoming.ConstraintWeekDays.Get(); err == nil { + current.ConstraintWeekDays.Set(v) + } + if v, err := incoming.ConstraintStartTime.Get(); err == nil { + current.ConstraintStartTime.Set(v) + } + if v, err := incoming.ConstraintEndTime.Get(); err == nil { + current.ConstraintEndTime.Set(v) + } + if v, err := incoming.ClosedAt.Get(); err == nil { + current.ClosedAt.Set(v.Truncate(time.Minute)) + } + if v, err := incoming.AnonymizedAt.Get(); err == nil { + current.AnonymizedAt.Set(v.Truncate(time.Minute)) + } + if v, err := incoming.TemplateID.Get(); err == nil { + current.TemplateID.Set(v) + } + if v, err := incoming.RecipientGroupIDs.Get(); err == nil { + current.RecipientGroupIDs.Set(v) + } + if v, err := incoming.WebhookID.Get(); err == nil { + current.WebhookID.Set(v) + } + + // check there is atleast one valid group + // and remove any empty groups + validGroups := []*uuid.UUID{} + for _, groupID := range current.RecipientGroupIDs.MustGet() { + count, err := c.RecipientGroupRepository.GetRecipientCount(ctx, groupID) + if err != nil { + return errs.Wrap(err) + } + if count > 0 { + validGroups = append(validGroups, groupID) + } + } + if len(validGroups) == 0 { + return errs.NewValidationError( + errors.New("Selected groups have no recipients"), + ) + } + // overwrite the allow / deny + current.AllowDenyIDs = incoming.AllowDenyIDs + if _, err := current.AllowDenyIDs.Get(); err == nil { + if incoming.DenyPageID.IsSpecified() { + if incoming.DenyPageID.IsNull() { + current.DenyPageID.SetNull() + } else { + current.DenyPageID.Set(incoming.DenyPageID.MustGet()) + } + } + + } + // validate and update + if err := current.Validate(); err != nil { + return errs.Wrap(err) + } + err = c.CampaignRepository.UpdateByID(ctx, id, current) + if err != nil { + c.Logger.Errorw("failed to update campaign by id", "error", err) + return errs.Wrap(err) + } + // re-schedule the campaign + // TODO should this all be in the schedule method + // remove all existing schedules if the campaign is not self-managed + if !incoming.IsSelfManaged() { + err = c.CampaignRecipientRepository.DeleteByCampaigID( + ctx, + id, + ) + if err != nil { + c.Logger.Errorw("failed to remove recipient groups", "error", err) + return errs.Wrap(err) + } + err = c.CampaignRepository.RemoveCampaignRecipientGroups( + ctx, + id, + ) + if err != nil { + c.Logger.Errorw("failed to remove campaignrecipient groups", "error", err) + return errs.Wrap(err) + } + } else { + // if self managed remove only the campaign recipients groups + 
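// existing campaign-recipients keep their ids, so previously sent links remain valid +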
err = c.CampaignRepository.RemoveCampaignRecipientGroups(
+			ctx,
+			id,
+		)
+		if err != nil {
+			c.Logger.Errorw("failed to remove campaign recipient groups", "error", err)
+			return errs.Wrap(err)
+		}
+	}
+	if incoming.RecipientGroupIDs.IsSpecified() && !incoming.RecipientGroupIDs.IsNull() {
+		recipientGroupIDs := incoming.RecipientGroupIDs.MustGet()
+		err = c.CampaignRepository.AddRecipientGroups(
+			ctx,
+			id,
+			recipientGroupIDs,
+		)
+	}
+	if err != nil {
+		c.Logger.Errorw("failed to add recipient groups", "error", err)
+		return errs.Wrap(err)
+	}
+	err = c.schedule(ctx, session, current)
+	if err != nil {
+		c.Logger.Errorw("failed to re-schedule campaign", "error", err)
+		return errs.Wrap(err)
+	}
+	c.AuditLogAuthorized(ae)
+	return nil
+}
+
+// validateAllowDenyIsSameTypeByIDs checks that the referenced allow and deny lists are all of the same type,
+// as allow and deny lists are mutually exclusive within a campaign
+func (c *Campaign) validateAllowDenyIsSameTypeByIDs(
+	ctx context.Context,
+	campaign *model.Campaign,
+) error {
+	if campaign.AllowDenyIDs.IsSpecified() && !campaign.AllowDenyIDs.IsNull() {
+		allowDenyIDs := campaign.AllowDenyIDs.MustGet()
+		if len(allowDenyIDs) == 0 {
+			return nil
+		}
+		isAllowList := false
+		for i, id := range allowDenyIDs {
+			entry, err := c.AllowDenyRepository.GetByID(ctx, id, &repository.AllowDenyOption{})
+			if err != nil {
+				c.Logger.Errorw("failed to get allow deny by id", "error", err)
+				return errs.Wrap(err)
+			}
+			allowed := entry.Allowed.MustGet()
+			if i == 0 {
+				isAllowList = allowed
+				continue
+			}
+			if isAllowList != allowed {
+				return validate.WrapErrorWithField(errors.New("allow and deny list are mutually exclusive"), "allowDenyIDs")
+			}
+		}
+	}
+	return nil
+}
+
+// updateSchedulesCampaignStartAndEndDates adjusts the start and end dates of a campaign
+// to the first and last selected weekdays, combined with the configured sending times
+func (c *Campaign) updateSchedulesCampaignStartAndEndDates(
+	campaign *model.Campaign,
+) error {
+	// get the first and last selected weekday
+	campaignWeekDays := campaign.ConstraintWeekDays.MustGet().AsSlice()
+	startAt := campaign.SendStartAt.MustGet()
+	endAt := campaign.SendEndAt.MustGet()
+	startTime := campaign.ConstraintStartTime.MustGet()
+	endTime := campaign.ConstraintEndTime.MustGet()
+	// find the first and last sending day within the campaign window
+	currentDate := startAt
+	startFound := false
+	startDay := time.Time{}
+	lastDay := time.Time{}
+	for currentDate.Before(endAt) || currentDate.Equal(endAt) {
+		if slices.Contains(campaignWeekDays, int(currentDate.Weekday())) {
+			if !startFound {
+				startDay = currentDate
+				startFound = true
+			}
+			lastDay = currentDate
+		}
+		currentDate = currentDate.AddDate(0, 0, 1)
+	}
+	campaign.SendStartAt.Set(startDay.Truncate(24 * time.Hour).Add(startTime.Minutes()))
+	campaign.SendEndAt.Set(lastDay.Truncate(24 * time.Hour).Add(endTime.Minutes()))
+	return nil
+}
+
+// DeleteByID deletes a campaign by id
+func (c *Campaign) DeleteByID(
+	ctx context.Context,
+	session *model.Session,
+	id *uuid.UUID,
+) error {
+	ae := NewAuditEvent("Campaign.DeleteById", session)
+	ae.Details["id"] = id.String()
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) {
+		c.LogAuthError(err)
+		return errs.Wrap(err)
+	}
+	if !isAuthorized {
+		c.AuditLogNotAuthorized(ae)
+		return errs.ErrAuthorizationFailed
+	}
+	// delete all campaign-allowDeny relations for the campaign
+	err = c.CampaignRepository.RemoveAllowDenyListsByCampaignID(ctx, id)
+
if err != nil { + c.Logger.Errorw("failed to delete campaign allow deny by campaign id", "error", err) + return errs.Wrap(err) + } + // remove all related events + err = c.CampaignRepository.DeleteEventsByCampaignID(ctx, id) + if err != nil { + c.Logger.Errorw("failed to delete campaign events by campaign id", "error", err) + return errs.Wrap(err) + } + // delete the relation between the campaign and the recipient groups + err = c.CampaignRepository.RemoveCampaignRecipientGroups(ctx, id) + if err != nil { + c.Logger.Errorw("failed to delete campaign recipient groups by campaign id", + "campaignID", id.String(), + "error", err, + ) + return errs.Wrap(err) + } + err = c.CampaignRecipientRepository.DeleteByCampaigID( + ctx, + id, + ) + if err != nil { + c.Logger.Errorw("failed to remove recipient groups", "error", err) + return errs.Wrap(err) + } + // delete campaign + err = c.CampaignRepository.DeleteByID(ctx, id) + if err != nil { + c.Logger.Errorw("failed to delete campaign by id", "error", err) + return errs.Wrap(err) + } + c.AuditLogAuthorized(ae) + return nil +} + +// SendNextBatch sends the next batch of emails +// atm this is only audit logged on auth failures +func (c *Campaign) SendNextBatch( + ctx context.Context, + session *model.Session, +) error { + ae := NewAuditEvent("Campaign.SendNextBatch", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get next batch + campaignRecipients, err := c.CampaignRecipientRepository.GetUnsendRecipientsForSending( + ctx, + 1000, // limit + &repository.CampaignRecipientOption{ + WithRecipient: true, + }, + ) + if err != nil { + c.Logger.Errorw("failed to get next batch", "error", err) + return errs.Wrap(err) + } + // group campaignrecipients by campaign so if it is send via SMTP we can reuse + // the connection + campaignMap := map[string][]*model.CampaignRecipient{} + for _, campaignRecipient := range campaignRecipients { + campaignID := campaignRecipient.CampaignID.MustGet().String() + campaignMap[campaignID] = append(campaignMap[campaignID], campaignRecipient) + } + // iterate each campaign and send the messages + for campaignID, campaignRecipients := range campaignMap { + err = c.sendCampaignMessages(ctx, session, campaignID, campaignRecipients) + if err != nil { + c.Logger.Errorw("failed to send campaign messages", "error", err) + continue + } + } + + return errs.Wrap(err) +} + +func (c *Campaign) sendCampaignMessages( + ctx context.Context, + session *model.Session, + cid string, + campaignRecipients []*model.CampaignRecipient, +) error { + campaignID := uuid.MustParse(cid) + // fetch the campaign to ensure that it is still active and to fetch details for sending + campaign, err := c.CampaignRepository.GetByID( + ctx, + &campaignID, + &repository.CampaignOption{ + WithCampaignTemplate: false, + }, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign by id", + "campaignID", campaignID, + "error", err, + ) + return errs.Wrap(err) + } + // check if the campaign has been close while sending is being processed + if !campaign.IsActive() { + c.Logger.Debugw("campaign is not active", + "campaignID", campaign.ID.MustGet().String(), + ) + return errors.New("skipping send, campaign is not active") + } + // fetch the campaign cTemplate to get the sender and message to send + 
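+	// note: if the template, or any relation it depends on (email, domain, SMTP or API sender),
+	// has been removed, the campaign is closed below rather than retried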
templateID, err := campaign.TemplateID.Get() + if err != nil { + c.Logger.Infow("campaign has no template", "error", err) + return errs.Wrap(errors.New("skipping send, campaign has no template")) + } + cTemplate, err := c.CampaignTemplateService.GetByID( + ctx, + session, + &templateID, + &repository.CampaignTemplateOption{ + WithDomain: true, + WithSMTPConfiguration: true, + WithIdentifier: true, + }, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign template by id", "error", err) + closeErr := c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "failed get email", + ) + return errs.Wrap(errors.Join(err, closeErr)) + } + // domain + domain := cTemplate.Domain + if domain == nil { + // if the domain has been removed from the campaign template used in this campaign, close the campaign + closeErr := c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "Campaign does not have a domain relation", + ) + if closeErr != nil { + return errs.Wrap(errors.Join(err, closeErr)) + } + c.Logger.Warnw("Running campaign does not have a domain relation - cancelling campaign", + "campaignID", campaignID.String(), + ) + return nil + } + // get email details + emailID, err := cTemplate.EmailID.Get() + if err != nil { + c.Logger.Warnw("Running campaign does not have a email relation - cancelling campaign", + "campaignID", campaignID.String(), + ) + // if the email relation has been removed from the campaign template used in this campagin, close the campaign + closeErr := c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "Campaign does not have a email relation", + ) + if closeErr != nil { + return errs.Wrap(errors.Join(err, closeErr)) + } + return nil + } + email, err := c.MailService.GetByID( + ctx, + session, + &emailID, + ) + if err != nil { + closeErr := c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "failed get email", + ) + return errs.Wrap(errors.Join(err, closeErr)) + } + content, err := email.Content.Get() + if err != nil { + // if mail templates fails to parse, close the campaign + closeErr := c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "failed get email content", + ) + return errs.Wrap(errors.Join(err, closeErr)) + } + t := template.New("email") + t = t.Funcs(TemplateFuncs()) + mailTmpl, err := t.Parse(content.String()) + if err != nil { + // if mail templates fails to parse, close the campaign + closeErr := c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "failed to parse the template", + ) + return errs.Wrap(errors.Join(err, closeErr)) + } + // check if sending is API or SMTP + isSmtpCampaign := cTemplate.SMTPConfigurationID.IsSpecified() && !cTemplate.SMTPConfigurationID.IsNull() + isAPISenderCampaign := cTemplate.APISenderID.IsSpecified() && !cTemplate.APISenderID.IsNull() + // close the campaign + if !isSmtpCampaign && !isAPISenderCampaign { + c.Logger.Warnw("Running campaign does not have a SMTP or API sender relation - cancelling campaign", + "campaignID", campaignID.String(), + ) + // if there is no smtp config or api sender, then one of them has been removed from the campaigns template + return c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "Campaign does not have a either an SMTP configuration or an API Sender", + ) + } + if isAPISenderCampaign { + // send via API + for _, campaignRecipient := range campaignRecipients { + // update the last attempt at timestamp so we do not accidently try sending the same + // email again if a panic or error happens in a 3. party lib. 
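+			// note: a failure to persist the attempt timestamp aborts the whole batch,
+			// as continuing could lead to duplicate sends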
+ campaignRecipientID := campaignRecipient.ID.MustGet() + campaignRecipient.LastAttemptAt = nullable.NewNullableWithValue(time.Now()) + err := c.CampaignRecipientRepository.UpdateByID( + ctx, + &campaignRecipientID, + campaignRecipient, + ) + if err != nil { + c.Logger.Errorw("CRITICAL - failed to update last attempted at - aborting", + "error", err, + ) + return errs.Wrap( + fmt.Errorf("failed to update last attempted at: %s \nThis is critical for sending, aborting...", err), + ) + } + err = c.APISenderService.Send( + ctx, + session, + cTemplate, + campaignRecipient, + domain, + mailTmpl, + email, + ) + if err != nil { + c.Logger.Errorw("failed to send message via. API", "error", err) + } + err = c.saveSendingResult( + ctx, + campaignRecipient, + err, + ) + if err != nil { + c.Logger.Errorw("failed to save sending result", "error", err) + return errs.Wrap(err) + } + } + err = c.setMostNotableCampaignEvent( + ctx, + campaign, + data.EVENT_CAMPAIGN_ACTIVE, + ) + if err != nil { + // err is logged in method call + return errs.Wrap(err) + } + return nil + } + if !isSmtpCampaign { + c.Logger.Error("no sender configuration found") + return errors.New("no sender configuration found") + } + // get the SMTP configuration + smtpConfigID, err := cTemplate.SMTPConfigurationID.Get() + if err != nil { + c.Logger.Infow( + "failed to get SMTP configuration from template - template no longer usable", + "smtpConfigID", smtpConfigID, + ) + return errs.Wrap(err) + } + smtpConfig, err := c.SMTPConfigService.GetByID( + ctx, + session, + &smtpConfigID, + &repository.SMTPConfigurationOption{ + WithHeaders: true, + }, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("smtp configuration did not load", "error", err) + return errs.Wrap(err) + } + smtpPort, err := smtpConfig.Port.Get() + if err != nil { + c.Logger.Errorw("failed to get smtp port", "error", err) + return errs.Wrap(err) + } + smtpHost, err := smtpConfig.Host.Get() + if err != nil { + c.Logger.Errorw("failed to get smtp host", "error", err) + return errs.Wrap(err) + } + smtpIgnoreCertErrors, err := smtpConfig.IgnoreCertErrors.Get() + if err != nil { + c.Logger.Errorw("failed to get smtp ignore cert errors", "error", err) + return errs.Wrap(err) + } + emailOptions := []mail.Option{ + mail.WithPort(smtpPort.Int()), + mail.WithTLSConfig( + &tls.Config{ + ServerName: smtpHost.String(), + // #nosec + InsecureSkipVerify: smtpIgnoreCertErrors, + // MinVersion: tls.VersionTLS12, + }, + ), + } + // setup authentication if provided + username, err := smtpConfig.Username.Get() + if err != nil { + c.Logger.Errorw("failed to get smtp username", "error", err) + return errs.Wrap(err) + } + password, err := smtpConfig.Password.Get() + if err != nil { + c.Logger.Errorw("failed to get smtp password", "error", err) + return errs.Wrap(err) + } + if un := username.String(); len(un) > 0 { + emailOptions = append( + emailOptions, + mail.WithUsername( + un, + ), + ) + if pw := password.String(); len(pw) > 0 { + emailOptions = append( + emailOptions, + mail.WithPassword( + pw, + ), + ) + } + } + // prepare all messages + messageOptions := []mail.MsgOption{ + mail.WithNoDefaultUserAgent(), + } + // create maps between recipients and messages + // and prepare all messages + messages := []*mail.Msg{} + mailToCampaignRecipient := make(map[string]*model.CampaignRecipient, len(campaignRecipients)) + for _, campaignRecipient := range campaignRecipients { + // update the last attempt at timestamp so we do not accidently try sending the same + // email 
again if a panic or error happens in a 3. party lib. + campaignRecipientID := campaignRecipient.ID.MustGet() + campaignRecipient.LastAttemptAt = nullable.NewNullableWithValue(time.Now()) + err := c.CampaignRecipientRepository.UpdateByID( + ctx, + &campaignRecipientID, + campaignRecipient, + ) + if err != nil { + c.Logger.Errorw("CRITICAL - failed to update last attempted at - aborting", + "error", err, + ) + return fmt.Errorf("failed to update last attempted at: %s \nThis is critical for sending, aborting...", err) + } + m := mail.NewMsg(messageOptions...) + /* TODO at the moment the mail envelope from is a email, so it can not be empty by definition + if envelopefrom.string() == "" { + // extract the email only from mail.mailheaderfrom + // and use that as the envelope from + // this is a fallback if the envelope from is not set + address, err := netmail.parseaddress(email.mailheaderfrom) + if err != nil { + c.logger.errorw("failed to parse mail header 'from'", "error", err) + return false,errs.Wrap(err) + } + err = m.envelopefrom(address.address) + if err != nil { + c.logger.errorw("failed to set envelope from", "error", err) + return false,errs.Wrap(err) + } + } else { + err = m.envelopefrom(email.mailenvelopefrom) + if err != nil { + c.logger.errorw("failed to set envelope from", "error", err) + return false,errs.Wrap(err) + } + } + */ + err = m.EnvelopeFrom(email.MailEnvelopeFrom.MustGet().String()) + if err != nil { + c.Logger.Errorw("failed to set envelope from", "error", err) + return errs.Wrap(err) + } + // headers + err = m.From(email.MailHeaderFrom.MustGet().String()) + if err != nil { + c.Logger.Errorw("failed to set mail header 'From'", "error", err) + return errs.Wrap(err) + } + // handle a race where the recipient has been removed/anonymized + if campaignRecipient.Recipient == nil { + crid := campaignRecipient.ID.MustGet() + err := c.CampaignRecipientRepository.Cancel( + ctx, + []*uuid.UUID{&crid}, + ) + if err != nil { + return errors.New("Missing recipient from campaign recipient") + } + c.Logger.Info("A campaign recipient had no recipient - cancelled - this can happend in rare race conditions or curruption bugs") + continue + } + recpEmail := campaignRecipient.Recipient.Email.MustGet().String() + err = m.To(recpEmail) + if err != nil { + c.Logger.Errorw("failed to set mail header 'To'", "error", err) + return errs.Wrap(err) + } + // store a map between recipient email and message + // so we can later save the sending result + mailToCampaignRecipient[m.GetToString()[0]] = campaignRecipient + // custom headers + if headers := smtpConfig.Headers; headers != nil { + for _, header := range headers { + key := header.Key.MustGet() + value := header.Value.MustGet() + m.SetGenHeader( + mail.Header(key.String()), + value.String(), + ) + } + } + m.Subject(email.MailHeaderSubject.MustGet().String()) + domainName, err := domain.Name.Get() + if err != nil { + c.Logger.Errorw("failed to get domain name", "error", err) + return errs.Wrap(err) + } + urlIdentifier := cTemplate.URLIdentifier + if urlIdentifier == nil { + c.Logger.Error("url identifier is MUST be loaded for the campaign template") + return fmt.Errorf("url identifier is MUST be loaded for the campaign template") + } + urlPath := cTemplate.URLPath.MustGet().String() + t := c.TemplateService.CreateMail( + domainName.String(), + urlIdentifier.Name.MustGet(), + urlPath, + campaignRecipient, + email, + nil, + ) + err = m.SetBodyHTMLTemplate(mailTmpl, t) + if err != nil { + c.Logger.Errorw("failed to set body html template", 
"error", err) + return errs.Wrap(err) + } + // attachments + attachments := email.Attachments + for _, attachment := range attachments { + p, err := c.MailService.AttachmentService.GetPath(attachment) + if err != nil { + return fmt.Errorf("failed to get attachment path: %s", err) + } + if !attachment.EmbeddedContent.MustGet() { + m.AttachFile(p.String()) + } else { + attachmentContent, err := os.ReadFile(p.String()) + if err != nil { + return errs.Wrap(err) + } + // hacky setup of attachment for executing as email template + attachmentAsEmail := model.Email{ + ID: email.ID, + CreatedAt: email.CreatedAt, + UpdatedAt: email.UpdatedAt, + Name: email.Name, + MailEnvelopeFrom: email.MailEnvelopeFrom, + MailHeaderFrom: email.MailHeaderFrom, + MailHeaderSubject: email.MailHeaderSubject, + Content: email.Content, + AddTrackingPixel: email.AddTrackingPixel, + CompanyID: email.CompanyID, + Attachments: email.Attachments, + Company: email.Company, + } + // really hacky / unsafe + attachmentAsEmail.Content = nullable.NewNullableWithValue( + *vo.NewUnsafeOptionalString1MB(string(attachmentContent)), + ) + attachmentStr, err := c.TemplateService.CreateMailBody( + urlIdentifier.Name.MustGet(), + urlPath, + domain, + campaignRecipient, + &attachmentAsEmail, + nil, + ) + if err != nil { + return errs.Wrap(fmt.Errorf("failed to setup attachment with embedded content: %s", err)) + } + m.AttachReadSeeker( + filepath.Base(p.String()), + strings.NewReader(attachmentStr), + ) + } + } + messages = append(messages, m) + } + + // send the messages + // the client sends all the messages and ensure that all messages are sent + // in the same connection + var mc *mail.Client + + // Try different authentication methods based on configuration + // If username is provided, use authentication; otherwise try without auth first + if un := username.String(); len(un) > 0 { + // Try CRAM-MD5 first when credentials are provided + emailOptionsCRAM5 := append(emailOptions, mail.WithSMTPAuth(mail.SMTPAuthCramMD5)) + mc, _ = mail.NewClient(smtpHost.String(), emailOptionsCRAM5...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(ctx, messages...) + + // Check if it's an authentication error and try PLAIN auth + if err != nil && (strings.Contains(err.Error(), "535 ") || + strings.Contains(err.Error(), "534 ") || + strings.Contains(err.Error(), "538 ") || + strings.Contains(err.Error(), "CRAM-MD5") || + strings.Contains(err.Error(), "authentication failed")) { + c.Logger.Warnw("CRAM-MD5 authentication failed, trying PLAIN auth", "error", err) + emailOptionsBasic := emailOptions + if build.Flags.Production { + emailOptionsBasic = append(emailOptions, mail.WithSMTPAuth(mail.SMTPAuthPlain)) + } + mc, _ = mail.NewClient(smtpHost.String(), emailOptionsBasic...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(ctx, messages...) + } + } else { + // No credentials provided, try without authentication (e.g., local postfix) + mc, _ = mail.NewClient(smtpHost.String(), emailOptions...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(ctx, messages...) 
+ + // If no-auth fails and we get an auth-related error, log it appropriately + if err != nil && (strings.Contains(err.Error(), "530 ") || + strings.Contains(err.Error(), "535 ") || + strings.Contains(err.Error(), "authentication required") || + strings.Contains(err.Error(), "AUTH")) { + c.Logger.Warnw("Server requires authentication but no credentials provided", "error", err) + } + } + if err != nil { + c.Logger.Errorw("failed to send test email", "error", err) + } + // check each message if has been sent and save the result for each + for _, m := range messages { + var sendError error = nil + if m.HasSendError() { + sendError = m.SendError() + } + // deref 0 as only a single recipient in each mail + to := m.GetToString()[0] + campaignRecipient := mailToCampaignRecipient[to] + err := c.saveSendingResult( + ctx, + campaignRecipient, + sendError, + ) + if err != nil { + c.Logger.Errorw("failed to save sending result", "error", err) + } + } + // check if most notable event + err = c.setMostNotableCampaignEvent( + ctx, + campaign, + data.EVENT_CAMPAIGN_ACTIVE, + ) + if err != nil { + // err is logged in method call + return errs.Wrap(err) + } + return nil +} + +// saveSendingResult saves a result from a send campaign atttempts +func (c *Campaign) saveSendingResult( + ctx context.Context, + campaignRecipient *model.CampaignRecipient, + sendError error, +) error { + if sendError == nil { + campaignRecipient.SentAt = nullable.NewNullableWithValue(time.Now()) + } + campaignRecipientID := campaignRecipient.ID.MustGet() + err := c.CampaignRecipientRepository.UpdateByID( + ctx, + &campaignRecipientID, + campaignRecipient, + ) + if err != nil { + c.Logger.Errorw("failed to update campaign recipient by id", "error", err) + } + // persist the event + id := uuid.New() + eventName := data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT + if sendError != nil { + eventName = data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_FAILED + } + eventID := cache.EventIDByName[eventName] + data := vo.NewEmptyOptionalString1MB() + if sendError != nil { + data, err = vo.NewOptionalString1MB(sendError.Error()) + if err != nil { + return errs.Wrap(fmt.Errorf("failed to create data: %s", err)) + } + } + campaignID := campaignRecipient.CampaignID.MustGet() + recipientID := campaignRecipient.RecipientID.MustGet() + campaign, err := c.CampaignRepository.GetByID( + ctx, + &campaignID, + &repository.CampaignOption{}, + ) + if err != nil { + return errs.Wrap(err) + } + var campaignEvent *model.CampaignEvent + if !campaign.IsAnonymous.MustGet() { + campaignEvent = &model.CampaignEvent{ + ID: &id, + CampaignID: &campaignID, + RecipientID: &recipientID, + IP: vo.NewOptionalString64Must(""), + UserAgent: vo.NewOptionalString255Must(""), + EventID: eventID, + Data: data, + } + } else { + campaignEvent = &model.CampaignEvent{ + ID: &id, + CampaignID: &campaignID, + RecipientID: nil, + IP: vo.NewOptionalString64Must(""), + UserAgent: vo.NewOptionalString255Must(""), + EventID: eventID, + Data: data, + } + } + err = c.CampaignRepository.SaveEvent(ctx, campaignEvent) + if err != nil { + return fmt.Errorf("failed to save event: %s", err) + } + // handle most notable event + err = c.SetNotableCampaignRecipientEvent( + ctx, + campaignRecipient, + cache.EventNameByID[eventID.String()], + ) + if err != nil { + // logging was done in the previous call + return errs.Wrap(err) + } + // handle webhook + webhookID, err := c.CampaignRepository.GetWebhookIDByCampaignID(ctx, &campaignID) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + 
c.Logger.Errorw("failed to get webhook id by campaign id", "error", err) + return errs.Wrap(err) + } + if webhookID == nil { + return nil + } + err = c.HandleWebhook( + ctx, + webhookID, + &campaignID, + &recipientID, + eventName, + ) + if err != nil { + return errs.Wrap(err) + } + return nil +} + +// saveEventCampaignClose saves an event about closing a campaign +func (c *Campaign) saveEventCampaignClose( + ctx context.Context, + campaignID *uuid.UUID, + reason string, +) error { + // persist the event + id := uuid.New() + r, err := vo.NewOptionalString1MB(reason) + if err != nil { + return errs.Wrap(err) + } + campaignEvent := &model.CampaignEvent{ + ID: &id, + CampaignID: campaignID, + RecipientID: nil, + IP: vo.NewOptionalString64Must(""), + UserAgent: vo.NewOptionalString255Must(""), + EventID: cache.EventIDByName[data.EVENT_CAMPAIGN_CLOSED], + Data: r, + } + err = c.CampaignRepository.SaveEvent(ctx, campaignEvent) + if err != nil { + return fmt.Errorf("failed to save event: %s", err) + } + // handle webhook + webhookID, err := c.CampaignRepository.GetWebhookIDByCampaignID(ctx, campaignID) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("failed to get webhook id by campaign id", "error", err) + return errs.Wrap(err) + } + if webhookID == nil { + return nil + } + err = c.HandleWebhook( + ctx, + webhookID, + campaignID, + nil, + data.EVENT_CAMPAIGN_CLOSED, + ) + if err != nil { + return errs.Wrap(err) + } + return nil +} + +// HandleCloseCampaigns closes campaigns that are past their end time +func (c *Campaign) HandleCloseCampaigns( + ctx context.Context, + session *model.Session, +) error { + ae := NewAuditEvent("Campaign.HandleCloseCampaigns", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get all campaigns that are past their end time + // and not yet closed + campaigns, err := c.CampaignRepository.GetAllReadyToClose( + ctx, + &repository.CampaignOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get closed campaigns", "error", err) + return errs.Wrap(err) + } + // close the campaigns + closedCampaignIDs := []string{} + for _, campaign := range campaigns.Rows { + campaignID := campaign.ID.MustGet() + closedCampaignIDs = append(closedCampaignIDs, campaignID.String()) + c.Logger.Debugw("closing campaign with id", "campaignID", campaignID) + var err error + // if there is no campaign template closing is due to missing template + campaignTemplateID, err := campaign.TemplateID.Get() + if err != nil { + err = c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "Campaign closed due to missing campaign template", + ) + c.handleCloseError(err, &campaignID) + return errs.Wrap(err) + } + // check if the template is unusable + cTemplate, err := c.CampaignTemplateService.GetByID( + ctx, + session, + &campaignTemplateID, + &repository.CampaignTemplateOption{}, + ) + if cTemplate == nil || err != nil { + err = c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "Campaign closed due to unusable template", + ) + c.handleCloseError(err, &campaignID) + return errs.Wrap(err) + } + err = c.closeCampaign( + ctx, + session, + &campaignID, + campaign, + "Campaign closed due to over close time", + ) + } + if len(closedCampaignIDs) > 0 { + ae.Details["closedCampaignIds"] = 
closedCampaignIDs + c.AuditLogAuthorized(ae) + } + return nil +} + +func (c *Campaign) handleCloseError(err error, campaignID *uuid.UUID) { + if err != nil && !errors.Is(err, errs.ErrCampaignAlreadyClosed) { + c.Logger.Errorw("failed to close campaign by id", "error", err) + return + } + if go_errors.Is(err, errs.ErrCampaignAlreadyClosed) { + c.Logger.Debugw("campaign already closed", "error", err) + return + } + c.Logger.Debugw("closed campaign with id", "campaignID", campaignID) +} + +// HandleAnonymizeCampaigns anonymizes campaigns are ready for anonymization +func (c *Campaign) HandleAnonymizeCampaigns( + ctx context.Context, + session *model.Session, +) error { + ae := NewAuditEvent("Campaign.HandleAnonymizeCampaigns", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get all campaigns that are past their end time + // and not yet closed + campaigns, err := c.CampaignRepository.GetReadyToAnonymize( + ctx, + &repository.CampaignOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get ready to anonymize campaigns", "error", err) + return errs.Wrap(err) + } + // close and anonymize the campaigns + affectedIds := []string{} + for _, campaign := range campaigns.Rows { + campaignID := campaign.ID.MustGet() + affectedIds = append(affectedIds, campaignID.String()) + c.Logger.Debugw("anonymizing campaign with id", "campaignID", campaignID) + err = c.AnonymizeByID(ctx, session, &campaignID) + + if err != nil && !errors.Is(err, errs.ErrCampaignAlreadyClosed) { + c.Logger.Errorw("failed to anonymize campaign by id", "error", err) + continue + } + if errors.Is(err, errs.ErrCampaignAlreadyAnonymized) { + c.Logger.Debugw("campaign already anonymized", "error", err) + continue + } + c.Logger.Debugw("anonymized campaign with id", "campaignID", campaignID) + } + if len(affectedIds) > 0 { + ae.Details["anonymizedCampaignIds"] = affectedIds + c.AuditLogAuthorized(ae) + } + return nil +} + +// CloseCampaignByID closes a campaign by id +func (c *Campaign) CloseCampaignByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Campaign.CloseCampaignByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.Wrap(errs.ErrAuthorizationFailed) + } + // get the campaign + campaign, err := c.CampaignRepository.GetByID( + ctx, + id, + &repository.CampaignOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign by id: %s", err) + return errs.Wrap(err) + } + err = c.closeCampaign(ctx, session, id, campaign, "Manually closed") + if err != nil { + c.Logger.Errorw("failed to close campaign by id", "error", err) + return errs.Wrap(err) + } + c.AuditLogAuthorized(ae) + return nil +} + +// closeCampaign closes a campaign +func (c *Campaign) closeCampaign( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + campaign *model.Campaign, + reason string, +) error { + if campaign == nil { + return errs.NewCustomError(errors.New("campaign is nil")) + } + c.Logger.Debugw("closing campaign with id", "id", 
id.String()) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + return errs.ErrAuthorizationFailed + } + // find all recipients that are not sent and cancel them + campaignRecipients, err := c.CampaignRecipientRepository.GetUnsendRecipients( + ctx, + repository.NO_LIMIT, + &repository.CampaignRecipientOption{}, + ) + c.Logger.Debugw("found unsent recipients to cancel", "count", len(campaignRecipients)) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("failed to get unsent recipients", "error", err) + return errs.Wrap(err) + } + campaignRecipientUUIDs := []*uuid.UUID{} + for _, cr := range campaignRecipients { + campaignRecipientID := cr.ID.MustGet() + campaignRecipientUUIDs = append(campaignRecipientUUIDs, &campaignRecipientID) + } + err = c.CampaignRecipientRepository.Cancel(ctx, campaignRecipientUUIDs) + if err != nil { + c.Logger.Errorw("failed to cancel recipients", "error", err) + return errs.Wrap(err) + } + err = campaign.Closed() + if go_errors.Is(err, errs.ErrCampaignAlreadyClosed) { + c.Logger.Debugw("campaign already closed", "error", err) + return errs.Wrap(err) + } + if err != nil { + c.Logger.Errorw("failed to close campaign by id", "error", err) + return errs.Wrap(err) + } + err = c.CampaignRepository.UpdateByID(ctx, id, campaign) + if err != nil { + c.Logger.Errorw("failed to close campaign by id", "error", err) + return errs.Wrap(err) + } + err = c.saveEventCampaignClose( + ctx, + id, + reason, + ) + if err != nil { + c.Logger.Errorw("failed to save event about closing campaign", "error", err) + } + err = c.setMostNotableCampaignEvent( + ctx, + campaign, + data.EVENT_CAMPAIGN_CLOSED, + ) + if err != nil { + // err is logged in method call + return errs.Wrap(err) + } + + // Generate campaign statistics when closing (skip test campaigns) + if !campaign.IsTest.MustGet() { + c.Logger.Debugf("generating campaign statistics", "campaignID", id.String()) + err = c.GenerateCampaignStats(ctx, session, id) + if err != nil { + c.Logger.Errorw("failed to generate campaign statistics", "error", err, "campaignID", id.String()) + // Don't fail the close operation if stats generation fails + } else { + c.Logger.Debugf("successfully generated campaign statistics", "campaignID", id.String()) + } + } else { + c.Logger.Debugf("skipping stats generation for test campaign", "campaignID", id.String()) + } + + return nil +} + +// GetCampaignEmailBody returns the rendered email for a self managed campaign recipient +func (c *Campaign) GetCampaignEmailBody( + ctx context.Context, + session *model.Session, + campaignRecipientID *uuid.UUID, +) (string, error) { + ae := NewAuditEvent("Campaign.GetCampaignEmailBody", session) + ae.Details["campaignRecipientId"] = campaignRecipientID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return "", errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return "", errs.ErrAuthorizationFailed + } + // check recipient is in a active campaign + campaignRecipient, err := c.CampaignRecipientRepository.GetByID( + ctx, + campaignRecipientID, + &repository.CampaignRecipientOption{ + WithRecipient: true, + }, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign recipient by id", 
"error", err) + return "", errs.Wrap(err) + } + if campaignRecipient.RecipientID.IsNull() { + return "", errs.NewCustomError( + errors.New("recipient is anonymized"), + ) + } + campaignID := campaignRecipient.CampaignID.MustGet() + campaign, err := c.CampaignRepository.GetByID( + ctx, + &campaignID, + &repository.CampaignOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign by id", "error", err) + return "", errs.Wrap(err) + } + templateID, err := campaign.TemplateID.Get() + if err != nil { + c.Logger.Errorw("failed to get template from campaign, has no template", "error", err) + return "", errs.Wrap(err) + } + cTemplate, err := c.CampaignTemplateService.GetByID( + ctx, + session, + &templateID, + &repository.CampaignTemplateOption{ + WithIdentifier: true, + }, + ) + emailID, err := cTemplate.EmailID.Get() + if err != nil { + c.Logger.Infow("failed email from template - template ID", "templateID", templateID) + return "", errs.NewValidationError( + errors.New("Campaign template has no email"), + ) + } + // get the email + email, err := c.MailService.GetByID( + ctx, + session, + &emailID, + ) + if err != nil { + c.Logger.Errorw("failed to get message by id", "error", err) + return "", errs.Wrap(err) + } + domainID, err := cTemplate.DomainID.Get() + if err != nil { + c.Logger.Infow("failed domain from template - template ID: %s", templateID) + return "", errs.NewValidationError( + errors.New("Campaign template has no domain"), + ) + } + domain, err := c.DomainService.GetByID( + ctx, + session, + &domainID, + &repository.DomainOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get domain by id", "error", err) + return "", errs.Wrap(err) + } + urlIdentifier := cTemplate.URLIdentifier + if urlIdentifier == nil { + return "", errors.New("url identifier is nil") + } + urlPath := cTemplate.URLPath.MustGet().String() + + // no audit on read + return c.TemplateService.CreateMailBody( + urlIdentifier.Name.MustGet(), + urlPath, + domain, + campaignRecipient, + email, + nil, + ) +} + +// GetLandingPageURLByCampaignRecipientID returns the URL for a campaign recipient +func (c *Campaign) GetLandingPageURLByCampaignRecipientID( + ctx context.Context, + session *model.Session, + campaignRecipientID *uuid.UUID, +) (string, error) { + ae := NewAuditEvent("Campaign.GetLandingPageURLByCampaignRecipientID", session) + ae.Details["campaignRecipientId"] = campaignRecipientID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return "", errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return "", errs.ErrAuthorizationFailed + } + campaignRecipient, err := c.CampaignRecipientRepository.GetByID( + ctx, + campaignRecipientID, + &repository.CampaignRecipientOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign recipient by id", "error", err) + return "", errs.Wrap(err) + } + campaignID := campaignRecipient.CampaignID.MustGet() + campaign, err := c.CampaignRepository.GetByID( + ctx, + &campaignID, + &repository.CampaignOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign by id", "error", err) + return "", errs.Wrap(err) + } + templateID, err := campaign.TemplateID.Get() + if err != nil { + c.Logger.Errorw("failed to get campaign template, campaign has no template", "error", err) + return "", errs.Wrap(err) + } + cTemplate, err := c.CampaignTemplateService.GetByID( + ctx, + session, + 
&templateID, + &repository.CampaignTemplateOption{ + WithIdentifier: true, + }, + ) + domainID, err := cTemplate.DomainID.Get() + if err != nil { + c.Logger.Infow("failed email from template - template ID", "templateID", templateID) + return "", errs.NewValidationError( + errors.New("Campaign template has no email"), + ) + } + domain, err := c.DomainService.GetByID( + ctx, + session, + &domainID, + &repository.DomainOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get domain by id", err) + return "", errs.Wrap(err) + } + urlPath := cTemplate.URLPath.MustGet().String() + baseURL := "https://" + domain.Name.MustGet().String() + idIdentifier := cTemplate.URLIdentifier.Name.MustGet() + url := fmt.Sprintf("%s%s?%s=%s", baseURL, urlPath, idIdentifier, campaignRecipientID.String()) + // no audit on read + return url, nil +} + +// SetSentAtByCampaignRecipientID sets the sent at time for a recipient +func (c *Campaign) SetSentAtByCampaignRecipientID( + ctx context.Context, + session *model.Session, + campaignRecipientID *uuid.UUID, +) error { + ae := NewAuditEvent("Campaign.SetSentAtByCampaignRecipientID", session) + ae.Details["campaignRecipientId"] = campaignRecipientID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get campaignRecipient + campaignRecipient, err := c.CampaignRecipientRepository.GetByID( + ctx, + campaignRecipientID, + &repository.CampaignRecipientOption{ + WithCampaign: true, + }, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign recipient by id", "error", err) + return errs.Wrap(err) + } + campaign := campaignRecipient.Campaign + // check if the campaign recipient is in a active campaign + if !campaign.IsActive() { + c.Logger.Debugw("failed to cancel campaign recipient by id: campaign is inactive", + "campaignID", campaign.ID.MustGet().String(), + ) + return errors.New("campaign is inactive") + } + + if !campaignRecipient.CancelledAt.IsNull() && + campaignRecipient.CancelledAt.MustGet().Before(time.Now()) { + c.Logger.Debugw("failed to cancel campaign recipient by id: already cancelled", + "campaignrecipientID", campaignRecipientID.String(), + ) + return errors.New("campaign recipient already cancelled") + } + campaignRecipient.SentAt = nullable.NewNullableWithValue(time.Now()) + err = c.CampaignRecipientRepository.UpdateByID( + ctx, + campaignRecipientID, + campaignRecipient, + ) + if err != nil { + c.Logger.Errorw("wailed to cancel campaign recipient by recipient id", "error", err) + return errs.Wrap(err) + } + // create an event for the sent email + id := uuid.New() + campaignID := campaignRecipient.CampaignID.MustGet() + recipientID := campaignRecipient.RecipientID.MustGet() + + var campaignEvent *model.CampaignEvent + + if campaign.IsAnonymous.MustGet() { + campaignEvent = &model.CampaignEvent{ + ID: &id, + CampaignID: &campaignID, + RecipientID: nil, + IP: vo.NewOptionalString64Must(""), + UserAgent: vo.NewOptionalString255Must(""), + EventID: cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT], + Data: vo.NewEmptyOptionalString1MB(), + } + } else { + campaignEvent = &model.CampaignEvent{ + ID: &id, + CampaignID: &campaignID, + RecipientID: &recipientID, + IP: vo.NewOptionalString64Must(""), + UserAgent: vo.NewOptionalString255Must(""), + EventID: 
cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT], + Data: vo.NewEmptyOptionalString1MB(), + } + } + + err = c.CampaignRepository.SaveEvent(ctx, campaignEvent) + if err != nil { + return errs.Wrap(err) + } + c.AuditLogAuthorized(ae) + // handle webhook + webhookID, err := c.CampaignRepository.GetWebhookIDByCampaignID(ctx, &campaignID) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("failed to get webhook id by campaign id", "error", err) + return errs.Wrap(err) + } + if errors.Is(err, gorm.ErrRecordNotFound) || webhookID == nil { + return nil + } + err = c.HandleWebhook( + ctx, + webhookID, + &campaignID, + &recipientID, + data.EVENT_CAMPAIGN_RECIPIENT_MESSAGE_SENT, + ) + if err != nil { + return errs.Wrap(err) + } + return nil +} + +// HandleWebhook handles a webhook +// it must only be called from secure contexts as it is not checked for permissions +func (c *Campaign) HandleWebhook( + ctx context.Context, + webhookID *uuid.UUID, + campaignID *uuid.UUID, + recipientID *uuid.UUID, + eventName string, +) error { + campaignName, err := c.CampaignRepository.GetNameByID(ctx, campaignID) + if err != nil { + return errs.Wrap(err) + } + email, err := c.RecipientRepository.GetEmailByID(ctx, recipientID) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return errs.Wrap(err) + } + webhook, err := c.WebhookRepository.GetByID(ctx, webhookID) + if err != nil { + return errs.Wrap(err) + } + now := time.Now() + webhookReq := WebhookRequest{ + Time: &now, + CampaignName: campaignName, + Event: eventName, + } + if email != nil { + webhookReq.Email = email.String() + } + // the webhook is handles as a different go routine + // so we don't block the campaign handling thread + go func() { + c.Logger.Debugw("sending webhook", "url", webhook.URL.MustGet().String()) + _, err := c.WebhookService.Send(ctx, webhook, &webhookReq) + if err != nil { + c.Logger.Errorw("failed to send webhook", "error", err) + } + c.Logger.Debugw("sending webhook completed", "url", webhook.URL.MustGet().String()) + }() + return nil +} + +// AnonymizeByID anonymizes a campaign including the events +func (c *Campaign) AnonymizeByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Campaign.AnonymizeByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + return errs.ErrAuthorizationFailed + } + // get campaign to check it exists and etc + campaign, err := c.CampaignRepository.GetByID( + ctx, + id, + &repository.CampaignOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign by id", "error", err) + return errs.Wrap(err) + } + // check if campaign is active, cause then it should be closed before continueing + if campaign.IsActive() { + err = c.closeCampaign( + ctx, + session, + id, + campaign, + "campaign is not active", + ) + } + if err != nil { + c.Logger.Errorw("failed to close campaign by id before anonymization", "error", err) + return errs.Wrap(err) + } + // assign a anonymized ID to each campaign recipient and make a map between + // their ID and the anonymized ID, this is a itermidiate step to anonymize the events + // where campaign receipients have both a anonymized ID and the recipient ID + campaignRecipients, err := c.CampaignRecipientRepository.GetByCampaignID( + ctx, + id, + 
&repository.CampaignRecipientOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get campaign recipients by campaign id", "error", err) + return errs.Wrap(err) + } + for _, cr := range campaignRecipients { + if cr.RecipientID.IsNull() { + c.Logger.Debug("skipping anonymization of campaign recipient without recipient") + continue + } + // add anonymized ID to each campaign recipient + anonymizedID := uuid.New() + cr.AnonymizedID = nullable.NewNullableWithValue(anonymizedID) + recipientID := cr.RecipientID.MustGet() + err := c.CampaignRecipientRepository.Anonymize(ctx, &recipientID, &anonymizedID) + if err != nil { + c.Logger.Errorw("failed to add anonymized ID to campaign recipient", "error", err) + return errs.Wrap(err) + } + // anonymize events and assign each anonymized ID so the events can still be tracked + campaignID, err := cr.CampaignID.Get() + if err != nil { + c.Logger.Debug("Recipient removed or anonymized, skipping in anonymization") + continue + } + err = c.CampaignRepository.AnonymizeCampaignEvent( + ctx, + &campaignID, + &recipientID, + &anonymizedID, + ) + if err != nil { + c.Logger.Errorw("failed to anonymize campaign event", "error", err) + return errs.Wrap(err) + } + } + // delete the relation between the campaign and the recipient groups + err = c.CampaignRepository.RemoveCampaignRecipientGroups(ctx, id) + if err != nil { + c.Logger.Errorw("failed to delete campaign recipient groups by campaign id", "error", err) + return errs.Wrap(err) + } + // remove the recipient ID from the campaign recipient so only the anomymized ID is left + err = c.CampaignRecipientRepository.RemoveRecipientIDByCampaignID(ctx, id) + if err != nil { + c.Logger.Errorw("failed to remove recipient ID from campaign recipients", "error", err) + return errs.Wrap(err) + } + // finally add a timestamp to the campaign to indicate when it was anonymized + err = c.CampaignRepository.AddAnonymizedAt(ctx, id) + c.AuditLogAuthorized(ae) + + return nil +} + +// SetNotableCampaignEvent checks and update if most notable event for a campaign +func (c *Campaign) setMostNotableCampaignEvent( + ctx context.Context, + campaign *model.Campaign, + eventName string, +) error { + currentEventID, _ := campaign.NotableEventID.Get() + notableEventID, _ := cache.EventIDByName[eventName] + if cache.IsMoreNotableCampaignRecipientEventID( + ¤tEventID, + notableEventID, + ) { + campaign.NotableEventID.Set(*notableEventID) + cid := campaign.ID.MustGet() + err := c.CampaignRepository.UpdateByID( + ctx, + &cid, + campaign, + ) + if err != nil { + c.Logger.Errorw("failed to update notable campaign event", "error", err) + return errs.Wrap(err) + } + } + return nil +} + +// SetNotableCampaignRecipientEvent checks and update if most notable event for campaign recipient +func (c *Campaign) SetNotableCampaignRecipientEvent( + ctx context.Context, + campaignRecipient *model.CampaignRecipient, + eventName string, +) error { + currentNotableEventID, _ := campaignRecipient.NotableEventID.Get() + notableEventID, _ := cache.EventIDByName[eventName] + if cache.IsMoreNotableCampaignRecipientEventID( + ¤tNotableEventID, + notableEventID, + ) { + campaignRecipient.NotableEventID.Set(*notableEventID) + crid := campaignRecipient.ID.MustGet() + err := c.CampaignRecipientRepository.UpdateByID( + ctx, + &crid, + campaignRecipient, + ) + if err != nil { + c.Logger.Errorw("failed to save updating notable campaign recipient event", "error", err) + return errs.Wrap(err) + } + } + return nil +} + +// GenerateCampaignStats generates and stores 
campaign statistics when a campaign is closed +func (c *Campaign) GenerateCampaignStats(ctx context.Context, session *model.Session, campaignID *uuid.UUID) error { + c.Logger.Debugw("starting campaign stats generation", "campaignID", campaignID.String()) + + // Check if stats already exist for this campaign to prevent duplicates + existingStats, err := c.CampaignRepository.GetCampaignStats(ctx, campaignID) + if err == nil && existingStats != nil { + c.Logger.Debugw("campaign stats already exist, skipping generation", "campaignID", campaignID.String()) + return nil + } + // Continue if record not found or table doesn't exist (which is expected for new stats) + c.Logger.Debugw("no existing stats found, proceeding with generation", "campaignID", campaignID.String(), "checkError", err) + + // Get the campaign without joins to avoid SQL ambiguity + campaign, err := c.CampaignRepository.GetByID(ctx, campaignID, &repository.CampaignOption{}) + if err != nil { + c.Logger.Errorw("failed to get campaign for stats", "error", err, "campaignID", campaignID.String()) + return errs.Wrap(err) + } + + campaignName := campaign.Name.MustGet().String() + c.Logger.Debugf("retrieved campaign for stats", "campaignID", campaignID.String(), "campaignName", campaignName) + + // Get campaign result stats (existing method) + resultStats, err := c.CampaignRepository.GetResultStats(ctx, campaignID) + if err != nil { + c.Logger.Errorw("failed to get result stats", "error", err, "campaignID", campaignID.String()) + return errs.Wrap(err) + } + c.Logger.Debugf("retrieved result stats", "campaignID", campaignID.String(), "recipients", resultStats.Recipients) + + // Calculate rates + openRate := float64(0) + clickRate := float64(0) + submissionRate := float64(0) + + if resultStats.Recipients > 0 { + openRate = (float64(resultStats.TrackingPixelLoaded) / float64(resultStats.Recipients)) * 100 + clickRate = (float64(resultStats.WebsiteLoaded) / float64(resultStats.Recipients)) * 100 + submissionRate = (float64(resultStats.SubmittedData) / float64(resultStats.Recipients)) * 100 + } + + // Determine campaign type + campaignType := "scheduled" + if campaign.SendStartAt == nil && campaign.SendEndAt == nil { + campaignType = "self-managed" + } + + // Get template name with proper session + templateName := "" + templateID := campaign.TemplateID.MustGet() + template, err := c.CampaignTemplateService.GetByID(ctx, session, &templateID, &repository.CampaignTemplateOption{}) + if err == nil && template != nil && !template.Name.IsNull() { + templateName = template.Name.MustGet().String() + } + + // Create time pointers + now := time.Now() + + var companyID *uuid.UUID + if !campaign.CompanyID.IsNull() { + id := campaign.CompanyID.MustGet() + companyID = &id + } + // companyID can be nil for global campaigns + + var sendStartAt *time.Time + if !campaign.SendStartAt.IsNull() { + t := campaign.SendStartAt.MustGet() + sendStartAt = &t + } + + var sendEndAt *time.Time + if !campaign.SendEndAt.IsNull() { + t := campaign.SendEndAt.MustGet() + sendEndAt = &t + } + + var closedAt *time.Time + if !campaign.ClosedAt.IsNull() { + t := campaign.ClosedAt.MustGet() + closedAt = &t + } + + // Create campaign stats record + id := uuid.New() + stats := &database.CampaignStats{ + ID: &id, + CampaignID: campaignID, + CampaignName: campaignName, + CompanyID: companyID, + CampaignStartDate: sendStartAt, + CampaignEndDate: sendEndAt, + CampaignClosedAt: closedAt, + TotalRecipients: int(resultStats.Recipients), + TotalEvents: 0, // Will be calculated from events 
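+		// raw counters below are taken from the aggregated result stats fetched above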
+ EmailsSent: int(resultStats.EmailsSent), + TrackingPixelLoaded: int(resultStats.TrackingPixelLoaded), + WebsiteVisits: int(resultStats.WebsiteLoaded), + DataSubmissions: int(resultStats.SubmittedData), + OpenRate: openRate, + ClickRate: clickRate, + SubmissionRate: submissionRate, + + TemplateName: templateName, + CampaignType: campaignType, + CreatedAt: &now, + UpdatedAt: &now, + } + + // Insert the stats + c.Logger.Debugf("inserting campaign stats", "campaignID", campaignID.String(), "statsID", stats.ID.String()) + err = c.CampaignRepository.InsertCampaignStats(ctx, stats) + if err != nil { + c.Logger.Errorw("failed to insert campaign stats", "error", err, "campaignID", campaignID.String()) + return errs.Wrap(err) + } + + c.Logger.Debugf("successfully inserted campaign stats", "campaignID", campaignID.String(), "statsID", stats.ID.String()) + return nil +} + +// GetCampaignStats retrieves campaign statistics by campaign ID +func (c *Campaign) GetCampaignStats(ctx context.Context, session *model.Session, campaignID *uuid.UUID) (*database.CampaignStats, error) { + // Check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + return nil, errs.ErrAuthorizationFailed + } + + return c.CampaignRepository.GetCampaignStats(ctx, campaignID) +} + +// GetAllCampaignStats retrieves all campaign statistics with pagination +func (c *Campaign) GetAllCampaignStats(ctx context.Context, session *model.Session, options *vo.QueryArgs, companyID *uuid.UUID) (*model.Result[database.CampaignStats], error) { + result := model.NewEmptyResult[database.CampaignStats]() + + // Check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + return result, errs.ErrAuthorizationFailed + } + + // Get the data + stats, err := c.CampaignRepository.GetAllCampaignStats(ctx, companyID, options) + if err != nil { + return result, errs.Wrap(err) + } + + // Convert to result format with pointers + rows := make([]*database.CampaignStats, len(stats)) + for i := range stats { + rows[i] = &stats[i] + } + + result.Rows = rows + result.HasNextPage = false // Simple implementation without pagination for now + + return result, nil +} diff --git a/backend/service/campaignTemplate.go b/backend/service/campaignTemplate.go new file mode 100644 index 0000000..f1be7a1 --- /dev/null +++ b/backend/service/campaignTemplate.go @@ -0,0 +1,706 @@ +package service + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// CampaignTemplate is a campaign template service +type CampaignTemplate struct { + Common + CampaignTemplateRepository *repository.CampaignTemplate + CampaignRepository *repository.Campaign + IdentifierRepository *repository.Identifier +} + +// Create creates a new campaign template +func (c *CampaignTemplate) Create( + ctx context.Context, + session *model.Session, + campaignTemplate *model.CampaignTemplate, +) 
(*uuid.UUID, error) { + ae := NewAuditEvent("CampaignTemplate.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // check uniqueness + var companyID *uuid.UUID + if cid, err := campaignTemplate.CompanyID.Get(); err == nil { + companyID = &cid + } + name := campaignTemplate.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + c.CampaignRepository.DB, + "campaign_templates", + name.String(), + companyID, + nil, + ) + if err != nil { + c.Logger.Errorw("failed to check campaign template uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + c.Logger.Debugw("campagin template name is already taken", "error", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // if no urlIdentifierID is set, get the id of the name 'id' + if !campaignTemplate.URLIdentifierID.IsSpecified() || campaignTemplate.URLIdentifierID.IsNull() { + urlIdentifier, err := c.IdentifierRepository.GetByName(ctx, "id") + if err != nil { + c.Logger.Errorw("failed to get url identifier by name", "error", err) + return nil, errs.Wrap(err) + } + campaignTemplate.URLIdentifierID = urlIdentifier.ID + } + // if no cookieIdentifierID is set, get the id of the name 'session' + if !campaignTemplate.StateIdentifierID.IsSpecified() || campaignTemplate.StateIdentifierID.IsNull() { + stateIdentifier, err := c.IdentifierRepository.GetByName(ctx, "p") + if err != nil { + c.Logger.Errorw("failed to get state identifier by name", "error", err) + return nil, errs.Wrap(err) + } + campaignTemplate.StateIdentifierID = stateIdentifier.ID + } + // if no path set to '' + if !campaignTemplate.URLPath.IsSpecified() || campaignTemplate.URLPath.IsNull() { + campaignTemplate.URLPath = nullable.NewNullableWithValue(*vo.NewURLPathMust("")) + } + // validate + if err := campaignTemplate.Validate(); err != nil { + c.Logger.Errorw("failed to validate campaign template", "error", err) + return nil, errs.Wrap(err) + } + // create + id, err := c.CampaignTemplateRepository.Insert(ctx, campaignTemplate) + if err != nil { + c.Logger.Errorw("failed to create campaign template", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + c.AuditLogAuthorized(ae) + + return id, nil +} + +// GetByID gets a campaign template by id +func (c *CampaignTemplate) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.CampaignTemplateOption, +) (*model.CampaignTemplate, error) { + ae := NewAuditEvent("CampaignTemplate.GetById", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get the campaign template + tmpl, err := c.CampaignTemplateRepository.GetByID(ctx, id, options) + if err != nil { + c.Logger.Errorw("wailed to get campaign template by id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return tmpl, nil +} + +// GetByCompanyID gets a campaign templates by company id +func (c *CampaignTemplate) GetByCompanyID( + ctx 
context.Context,
+	session *model.Session,
+	companyID *uuid.UUID,
+	options *repository.CampaignTemplateOption,
+) (*model.Result[model.CampaignTemplate], error) {
+	result := model.NewEmptyResult[model.CampaignTemplate]()
+	ae := NewAuditEvent("CampaignTemplate.GetByCompanyID", session)
+	if companyID != nil {
+		ae.Details["companyId"] = companyID.String()
+	}
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) {
+		c.LogAuthError(err)
+		return result, errs.Wrap(err)
+	}
+	if !isAuthorized {
+		c.AuditLogNotAuthorized(ae)
+		return result, errs.ErrAuthorizationFailed
+	}
+	// get the campaign templates
+	result, err = c.CampaignTemplateRepository.GetAllByCompanyID(ctx, companyID, options)
+	if err != nil {
+		c.Logger.Errorw("failed to get campaign templates by company id", "error", err)
+		return result, errs.Wrap(err)
+	}
+	// no audit on read
+	return result, nil
+}
+
+// GetAll gets all campaign templates
+func (c *CampaignTemplate) GetAll(
+	ctx context.Context,
+	session *model.Session,
+	companyID *uuid.UUID,
+	pagination *vo.Pagination,
+	options *repository.CampaignTemplateOption,
+) (*model.Result[model.CampaignTemplate], error) {
+	result := model.NewEmptyResult[model.CampaignTemplate]()
+	ae := NewAuditEvent("CampaignTemplate.GetAll", session)
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) {
+		c.LogAuthError(err)
+		return result, errs.Wrap(err)
+	}
+	if !isAuthorized {
+		c.AuditLogNotAuthorized(ae)
+		return result, errs.ErrAuthorizationFailed
+	}
+	// get all campaign templates
+	result, err = c.CampaignTemplateRepository.GetAll(
+		ctx,
+		companyID,
+		options,
+	)
+	if err != nil {
+		c.Logger.Errorw("failed to get all campaign templates", "error", err)
+		return result, errs.Wrap(err)
+	}
+	// no audit on read
+	return result, nil
+}
+
+// removeAPISenderIDBySenderID removes the API sender ID from all templates that reference it;
+// this makes those templates unusable until a new sender has been added.
+func (c *CampaignTemplate) removeAPISenderIDBySenderID(
+	ctx context.Context,
+	session *model.Session,
+	apiSenderID *uuid.UUID,
+) error {
+	ae := NewAuditEvent("CampaignTemplate.RemoveAPISenderIDBySenderID", session)
+	if apiSenderID != nil {
+		ae.Details["apiSenderId"] = apiSenderID.String()
+	}
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) {
+		c.LogAuthError(err)
+		return err
+	}
+	if !isAuthorized {
+		c.AuditLogNotAuthorized(ae)
+		return errs.ErrAuthorizationFailed
+	}
+
+	// all active campaigns that use a template which becomes unusable due to
+	// the sender being removed will be set to close on the next schedule tick.
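+	// flow: look up the templates that reference the sender, find the campaigns built
+	// on those templates, close the ones that are still active, and finally clear the
+	// sender reference from the templates themselves.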
+ templatesAffected, err := c.CampaignTemplateRepository.GetByAPISenderID( + ctx, + apiSenderID, + &repository.CampaignTemplateOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get affected campaign templates", "error", err) + return err + } + templateIDs := []*uuid.UUID{} + for _, t := range templatesAffected { + id := t.ID.MustGet() + templateIDs = append(templateIDs, &id) + } + campaignsAffected, err := c.CampaignRepository.GetByTemplateIDs(ctx, templateIDs) + if err != nil { + c.Logger.Errorw("failed to get affected campaigns by template IDs", "error", err) + return err + } + + for _, campaign := range campaignsAffected { + if !campaign.IsActive() { + continue + } + err := campaign.Close() + if err != nil { + c.Logger.Errorw("failed to close to campagin", "error", err) + } + campaignID := campaign.ID.MustGet() + err = c.CampaignRepository.UpdateByID( + ctx, + &campaignID, + campaign, + ) + if err != nil { + c.Logger.Errorw("failed to update closed campagin", "error", err) + } + } + // remove the domain id from the templates + err = c.CampaignTemplateRepository.RemoveAPISenderIDFromAll(ctx, apiSenderID) + if err != nil { + c.Logger.Errorw("failed to remove domain ID from all campaign templates", "error", err) + return err + } + return nil +} + +// RemoveDomainByDomainID removes the domain ID from a template +// this makes the template unusable until a domain has been added. +func (c *CampaignTemplate) RemoveDomainByDomainID( + ctx context.Context, + session *model.Session, + domainID *uuid.UUID, +) error { + ae := NewAuditEvent("CampaignTemplate.RemoveDomainByDomainID", session) + if domainID != nil { + ae.Details["domainID"] = domainID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return err + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + + // all active campaigns that use this template ID which is becoming unusable due to + // the domain being removed will be set to close on the next schedule tick. + templatesAffected, err := c.CampaignTemplateRepository.GetByDomainID( + ctx, + domainID, + &repository.CampaignTemplateOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get affected campaign templates", "error", err) + return err + } + templateIDs := []*uuid.UUID{} + for _, t := range templatesAffected { + id := t.ID.MustGet() + templateIDs = append(templateIDs, &id) + } + campaignsAffected, err := c.CampaignRepository.GetByTemplateIDs(ctx, templateIDs) + if err != nil { + c.Logger.Errorw("failed to get affected campaigns", "error", err) + return err + } + + for _, campaign := range campaignsAffected { + if !campaign.IsActive() { + continue + } + err := campaign.Close() + if err != nil { + c.Logger.Errorw("failed to close campaign", "error", err) + } + campaignID := campaign.ID.MustGet() + err = c.CampaignRepository.UpdateByID( + ctx, + &campaignID, + campaign, + ) + if err != nil { + c.Logger.Errorw("failed to update closed campaign", "error", err) + } + } + // remove the domain id from the templates + err = c.CampaignTemplateRepository.RemoveDomainIDFromAll(ctx, domainID) + if err != nil { + c.Logger.Errorw("failed to remove domain ID from all campaign templates", "error", err) + return err + } + return nil +} + +// RemoveSmtpBySmtpID removes the smtp configuration ID from a template +// this makes the template unusable until a domain has been added. 
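+//
+// Illustrative call site (not part of the original code; the service variable name is
+// assumed): an SMTP configuration delete flow is expected to call this before removing
+// the configuration itself:
+//
+//	if err := campaignTemplateSvc.RemoveSmtpBySmtpID(ctx, session, smtpID); err != nil {
+//		// handle the error, e.g. abort the deletion
+//	}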
+func (c *CampaignTemplate) RemoveSmtpBySmtpID( + ctx context.Context, + session *model.Session, + smtpID *uuid.UUID, +) error { + ae := NewAuditEvent("CampaignTemplate.RemoveSmtpBySmtpID", session) + ae.Details["smtpId"] = smtpID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return err + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // all active campaigns that use this template ID which is becoming unusable due to + // the domain being removed will be set to close on the next schedule tick. + // templatesAffected, err := c.CampaignTemplateRepository.GetBySmtpID( + templatesAffected, err := c.CampaignTemplateRepository.GetBySmtpID( + ctx, + smtpID, + &repository.CampaignTemplateOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get affected campaign templates", "error", err) + return err + } + templateIDs := []*uuid.UUID{} + for _, t := range templatesAffected { + id := t.ID.MustGet() + templateIDs = append(templateIDs, &id) + } + campaignsAffected, err := c.CampaignRepository.GetByTemplateIDs(ctx, templateIDs) + if err != nil { + c.Logger.Errorw("failed to get affected campaigns", "error", err) + return err + } + + for _, campaign := range campaignsAffected { + if !campaign.IsActive() { + continue + } + err := campaign.Close() + if err != nil { + c.Logger.Errorw("failed to close campaign", "error", err) + } + campaignID := campaign.ID.MustGet() + err = c.CampaignRepository.UpdateByID( + ctx, + &campaignID, + campaign, + ) + if err != nil { + c.Logger.Errorw("failed to update closed campaign", "error", err) + } + } + // remove the domain id from the templates + err = c.CampaignTemplateRepository.RemoveSmtpIDFromAll(ctx, smtpID) + if err != nil { + c.Logger.Errorw("failed to remove domain ID from all campaign templates", "error", err) + return err + } + return nil +} + +// RemovePageByPageID removes the page ID from a template +// this makes the template unusable until a domain has been added. +func (c *CampaignTemplate) RemovePagesByPageID( + ctx context.Context, + session *model.Session, + pageID *uuid.UUID, +) error { + ae := NewAuditEvent("CampaignTemplate.RemovePagesByPageID", session) + ae.Details["pageId"] = pageID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return err + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + + // all active campaigns that use this template ID which is becoming unusable due to + // the page being removed will be set to close on the next schedule tick. 
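+	// same pattern as the sender, domain and smtp variants above: resolve the affected
+	// templates, close any active campaigns that use them, then clear the page reference.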
+ templatesAffected, err := c.CampaignTemplateRepository.GetByPageID( + ctx, + pageID, + &repository.CampaignTemplateOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get affected campaign templates", "error", err) + return err + } + templateIDs := []*uuid.UUID{} + for _, t := range templatesAffected { + id := t.ID.MustGet() + templateIDs = append(templateIDs, &id) + } + campaignsAffected, err := c.CampaignRepository.GetByTemplateIDs(ctx, templateIDs) + if err != nil { + c.Logger.Errorw("failed to get affected campaigns", "error", err) + return err + } + + for _, campaign := range campaignsAffected { + if !campaign.IsActive() { + continue + } + err := campaign.Close() + if err != nil { + c.Logger.Errorw("failed to close campagin", "error", err) + } + campaignID := campaign.ID.MustGet() + err = c.CampaignRepository.UpdateByID( + ctx, + &campaignID, + campaign, + ) + if err != nil { + c.Logger.Errorw("failed to update closed campaign", "error", err) + } + } + // remove the domain id from the templates + err = c.CampaignTemplateRepository.RemovePageIDFromAll(ctx, pageID) + if err != nil { + c.Logger.Errorw("failed to remove page ID from all campaign templates", "error", err) + return err + } + return nil +} + +// UpdateByID updates a campaign template by id +func (c *CampaignTemplate) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + campaignTemplate *model.CampaignTemplate, +) error { + ae := NewAuditEvent("CampaignTemplate.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return err + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + /* TODO consider to reintroduce this, but only stop updates towards templates that are used in scheduled or + not stopped/closed campaigns + + // if this template is used by a campaign, we cannot update it as it has been used for scheduling + campaignCount, err := c.CampaignRepository.GetCampaignCountByTemplateID(ctx, id) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("failed to get campaign by campaign template id","error ,err) + return err + } + if campaignCount > 0 { + c.Logger.Error("cannot update campaign template as it is used by a campaign") + s := "campaign" + if campaignCount > 1 { + s = "campaigns" + } + return validate.WrapErrorWithField( + fmt.Errorf("template used by %d %s", campaignCount, s), + "cant update", + ) + } + */ + + // get the campaign template and change values + incoming, err := c.CampaignTemplateRepository.GetByID(ctx, id, nil) + if errors.Is(err, gorm.ErrRecordNotFound) { + c.Logger.Errorw("campaign template not found", "error", err) + return err + } + if err != nil { + c.Logger.Errorw("failed to update campaign template by id", "error", err) + return err + } + // update the campaign CampaignTemplate + if v, err := campaignTemplate.Name.Get(); err == nil { + // check uniqueness + var companyID *uuid.UUID + if cid, err := campaignTemplate.CompanyID.Get(); err == nil { + companyID = &cid + } + name := campaignTemplate.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + c.CampaignRepository.DB, + "campaign_templates", + name.String(), + companyID, + id, + ) + if err != nil { + c.Logger.Errorw("failed to check campaign template uniqueness", "error", err) + return err + } + if !isOK { + c.Logger.Debugw("campagin 
template name is already taken", "error", name.String()) + return validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + incoming.Name.Set(v) + } + if campaignTemplate.DomainID.IsSpecified() { + if v, err := campaignTemplate.DomainID.Get(); err == nil { + incoming.DomainID.Set(v) + } else { + incoming.DomainID.SetNull() + } + } + if campaignTemplate.SMTPConfigurationID.IsSpecified() { + if v, err := campaignTemplate.SMTPConfigurationID.Get(); err == nil { + incoming.SMTPConfigurationID.Set(v) + incoming.APISenderID.SetNull() + } else { + incoming.SMTPConfigurationID.SetNull() + } + } + if campaignTemplate.APISenderID.IsSpecified() { + if v, err := campaignTemplate.APISenderID.Get(); err == nil { + incoming.APISenderID.Set(v) + incoming.SMTPConfigurationID.SetNull() + } else { + incoming.APISenderID.SetNull() + } + } + if campaignTemplate.EmailID.IsSpecified() { + if v, err := campaignTemplate.EmailID.Get(); err == nil { + incoming.EmailID.Set(v) + } else { + incoming.EmailID.SetNull() + } + } + if campaignTemplate.BeforeLandingPageID.IsSpecified() { + if v, err := campaignTemplate.BeforeLandingPageID.Get(); err == nil { + incoming.BeforeLandingPageID.Set(v) + } else { + incoming.BeforeLandingPageID.SetNull() + } + } + if campaignTemplate.LandingPageID.IsSpecified() { + if v, err := campaignTemplate.LandingPageID.Get(); err == nil { + incoming.LandingPageID.Set(v) + } else { + incoming.LandingPageID.SetNull() + } + } + if campaignTemplate.AfterLandingPageID.IsSpecified() { + if v, err := campaignTemplate.AfterLandingPageID.Get(); err == nil { + incoming.AfterLandingPageID.Set(v) + } else { + incoming.AfterLandingPageID.SetNull() + } + } + if campaignTemplate.AfterLandingPageRedirectURL.IsSpecified() { + if v, err := campaignTemplate.AfterLandingPageRedirectURL.Get(); err == nil { + incoming.AfterLandingPageRedirectURL.Set(v) + } else { + incoming.AfterLandingPageRedirectURL.SetNull() + } + } + if v, err := campaignTemplate.URLIdentifierID.Get(); err == nil { + incoming.URLIdentifierID.Set(v) + } + if v, err := campaignTemplate.StateIdentifierID.Get(); err == nil { + incoming.StateIdentifierID.Set(v) + } + if v, err := campaignTemplate.URLPath.Get(); err == nil { + incoming.URLPath.Set(v) + } + // validate + if err := incoming.Validate(); err != nil { + c.Logger.Errorw("failed to validate campaign template", "error", err) + return err + } + err = c.CampaignTemplateRepository.UpdateByID( + ctx, + id, + incoming, + ) + if err != nil { + c.Logger.Errorw("failed to update campaign template by id", "error", err) + return err + } + c.AuditLogAuthorized(ae) + + return nil +} + +// DeleteByID deletes a campaign template by id +func (c *CampaignTemplate) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("CampaignTemplate.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return err + } + if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + + // all active campaigns that use this template ID which is becoming unusable due to + // the domain being removed will be set to close on the next schedule tick. 
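+	// note: active campaigns that still reference this template are closed (not
+	// deleted) before the template itself is removed.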
+	campaignsAffected, err := c.CampaignRepository.GetByTemplateIDs(
+		ctx,
+		[]*uuid.UUID{id},
+	)
+	if err != nil {
+		c.Logger.Errorw(
+			"failed to get campaigns affected by template deletion",
+			"error", err,
+		)
+		return err
+	}
+	for _, campaign := range campaignsAffected {
+		if !campaign.IsActive() {
+			continue
+		}
+		err := campaign.Close()
+		if err != nil {
+			c.Logger.Errorw("failed to close campaign", "error", err)
+		}
+		campaignID := campaign.ID.MustGet()
+		err = c.CampaignRepository.UpdateByID(
+			ctx,
+			&campaignID,
+			campaign,
+		)
+		if err != nil {
+			c.Logger.Errorw("failed to update closed campaign", "error", err)
+		}
+	}
+	// remove the campaign template id from campaigns
+	err = c.CampaignRepository.RemoveCampaignTemplateIDFromCampaigns(
+		ctx,
+		id,
+	)
+	if err != nil {
+		c.Logger.Errorw("failed to remove campaign template id from campaigns", "error", err)
+		return err
+	}
+	// delete the campaign template
+	err = c.CampaignTemplateRepository.DeleteByID(ctx, id)
+	if err != nil {
+		c.Logger.Errorw("failed to delete campaign template by id", "error", err)
+		return err
+	}
+	c.AuditLogAuthorized(ae)
+	return nil
+}
diff --git a/backend/service/company.go b/backend/service/company.go
new file mode 100644
index 0000000..bedbffa
--- /dev/null
+++ b/backend/service/company.go
@@ -0,0 +1,538 @@
+package service
+
+import (
+	"context"
+
+	"github.com/go-errors/errors"
+
+	"github.com/gin-gonic/gin"
+	"github.com/google/uuid"
+	"github.com/phishingclub/phishingclub/data"
+	"github.com/phishingclub/phishingclub/errs"
+	"github.com/phishingclub/phishingclub/model"
+	"github.com/phishingclub/phishingclub/repository"
+	"github.com/phishingclub/phishingclub/validate"
+	"github.com/phishingclub/phishingclub/vo"
+	"gorm.io/gorm"
+)
+
+// Company is the Company service
+type Company struct {
+	Common
+	DomainService            *Domain
+	PageService              *Page
+	EmailService             *Email
+	SMTPConfigurationService *SMTPConfiguration
+	APISenderService         *APISender
+	RecipientService         *Recipient
+	RecipientGroupService    *RecipientGroup
+	CampaignService          *Campaign
+	CampaignTemplate         *CampaignTemplate
+	AllowDenyService         *AllowDeny
+	WebhookService           *Webhook
+	CompanyRepository        *repository.Company
+}
+
+// Create creates a company
+func (s *Company) Create(
+	ctx context.Context,
+	session *model.Session,
+	company *model.Company,
+) (*model.Company, error) {
+	ae := NewAuditEvent("Company.Create", session)
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) {
+		s.LogAuthError(err)
+		return nil, errs.Wrap(err)
+	}
+	if !isAuthorized {
+		s.AuditLogNotAuthorized(ae)
+		return nil, errs.ErrAuthorizationFailed
+	}
+	// parse request
+	name, err := company.Name.Get()
+	if err != nil {
+		s.Logger.Debugw("failed to get company name", "error", err)
+		return nil, errs.Wrap(err)
+	}
+	// check if the company name is unique; let a TOCTOU race surface as a generic
+	// error, which is easier than checking every database's specific unique
+	// constraint errors
+	_, err = s.CompanyRepository.GetByName(
+		ctx,
+		name.String(),
+	)
+	// we expect not to find a company with this name
+	if err != nil {
+		// something went wrong
+		if !errors.Is(err, gorm.ErrRecordNotFound) {
+			s.Logger.Errorw("failed to check unique company name", "error", err)
+			return nil, errs.Wrap(err)
+		}
+	}
+	// if there is no error, then the company name is already taken
+	if err == nil {
+		// company name is already taken
+		s.Logger.Debugw("company name is already taken", "error", name.String())
+		return nil, validate.WrapErrorWithField(errors.New("not unique"), "name")
+	}
+	//
create company + createdCompanyID, err := s.CompanyRepository.Insert( + ctx, + company, + ) + if err != nil { + s.Logger.Errorw("failed to create company", "error", err) + return nil, errs.Wrap(err) + } + createdCompany, err := s.CompanyRepository.GetByID( + ctx, + createdCompanyID, + ) + if err != nil { + s.Logger.Errorw("failed to get created company", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = createdCompanyID.String() + s.AuditLogAuthorized(ae) + + return createdCompany, nil +} + +// GetAll gets all companies with pagination +func (s *Company) GetAll( + ctx context.Context, + session *model.Session, + queryArgs *vo.QueryArgs, +) (*model.Result[model.Company], error) { + result := model.NewEmptyResult[model.Company]() + ae := NewAuditEvent("Company.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = s.CompanyRepository.GetAll( + ctx, + queryArgs, + ) + if err != nil { + s.Logger.Errorw("failed to get companies", "error", err) + return nil, errs.Wrap(err) + } + return result, nil +} + +// GetByID gets a company by ID +func (s *Company) GetByID( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, +) (*model.Company, error) { + ae := NewAuditEvent("Company.GetByID", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + company, err := s.CompanyRepository.GetByID( + ctx, + companyID, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early, this is not an error + return nil, errs.Wrap(err) + } + if err != nil { + s.Logger.Errorw("failed to get company by id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return company, nil +} + +// Update updates a company by ID +func (s *Company) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + company *model.Company, +) error { + ae := NewAuditEvent("Company.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return err + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get current + current, err := s.CompanyRepository.GetByID( + ctx, + id, + ) + if err != nil { + return err + } + name, err := company.Name.Get() + if err != nil { + s.Logger.Debugw("failed to get company name", "error", err) + return err + } + // check if company name is unique, let a TOCTOU error + // happen as a generic error, this is easier than checking + // all database types specific unique contraint errors + _, err = s.CompanyRepository.GetByName( + ctx, + name.String(), + ) + // we expect not to find a company with this name + // so any error is an actual error + if err != nil { + // something went wrong + if !errors.Is(err, gorm.ErrRecordNotFound) { + s.Logger.Debugw("failed to get existing 
company name", "error", current.Name) + return err + } + } + // if there is no error, then the company name is already taken + if err == nil { + s.Logger.Debugw("company name is already taken", "error", name.String()) + return validate.WrapErrorWithField(errors.New("not unique"), "name") + } + // update changed fields + if v, err := company.Name.Get(); err == nil { + current.Name.Set(v) + } + // validate + if err := company.Validate(); err != nil { + s.Logger.Errorw("failed to validate company", "error", err) + return err + } + + // update company + err = s.CompanyRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + s.Logger.Errorw("failed to update company by id", "error", err) + return err + } + s.AuditLogAuthorized(ae) + return nil +} + +// DeleteByID deletes a company by ID +func (s *Company) DeleteByID( + g *gin.Context, + session *model.Session, + companyID *uuid.UUID, +) (int, error) { + ae := NewAuditEvent("Company.DeleteByID", session) + ae.Details["companyId"] = companyID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return 0, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return 0, errs.ErrAuthorizationFailed + } + // deleting a company starts a big chain of deletion where all things related + // to the company is deleted + // delete domains owned by the company, this deletes assets owned by the domains + affectedDomains, err := s.DomainService.GetByCompanyID( + g, + session, + companyID, + &repository.DomainOption{}, + ) + if err != nil { + s.Logger.Errorw("error", + "failed get domains that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, domain := range affectedDomains.Rows { + domainID := domain.ID.MustGet() + err = s.DomainService.DeleteByID( + g, + session, + &domainID, + ) + if err != nil { + s.Logger.Errorw("failed to delete domains related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete pages, this also cancels campaings and remove relations that use them + affectedPages, err := s.PageService.GetByCompanyID( + g, + session, + companyID, + &repository.PageOption{}, + ) + if err != nil { + s.Logger.Errorw( + "failed get pages that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, page := range affectedPages.Rows { + pageID := page.ID.MustGet() + err = s.PageService.DeleteByID( + g, + session, + &pageID, + ) + if err != nil { + s.Logger.Errorw("failed to delete domains related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete emails, this also removes attachments + affectedEmails, err := s.EmailService.GetByCompanyID( + g, + session, + companyID, + ) + if err != nil { + s.Logger.Errorw( + "failed get email that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, email := range affectedEmails.Rows { + emailID := email.ID.MustGet() + err = s.EmailService.DeleteByID( + g, + session, + &emailID, + ) + if err != nil { + s.Logger.Errorw("failed to delete emails related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete api senders + affectedApiSenders, err := s.APISenderService.GetByCompanyID( + g, + session, + companyID, + &repository.APISenderOption{}, + ) + if err != nil { + s.Logger.Errorw( + "failed get api sender that should be deleted due to company 
deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, apiSender := range affectedApiSenders.Rows { + emailID := apiSender.ID.MustGet() + err = s.APISenderService.DeleteByID( + g, + session, + &emailID, + ) + if err != nil { + s.Logger.Errorw("failed to delete api senders related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete groups + affectedGroups, err := s.RecipientGroupService.GetByCompanyID( + g, + session, + companyID, + &repository.RecipientGroupOption{}, + ) + if err != nil { + s.Logger.Errorw( + "failed get recipient groups that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, group := range affectedGroups { + groupID := group.ID.MustGet() + err = s.RecipientGroupService.DeleteByID( + g, + session, + &groupID, + ) + if err != nil { + s.Logger.Errorw("failed to delete recipient groups related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete recipients + affectedRecipients, err := s.RecipientService.GetByCompanyID( + g, + session, + companyID, + &repository.RecipientOption{}, + ) + if err != nil { + s.Logger.Errorw( + "failed get recipients that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, recipient := range affectedRecipients.Rows { + recpID := recipient.ID.MustGet() + err = s.RecipientService.DeleteByID( + g, + session, + &recpID, + ) + if err != nil { + s.Logger.Errorw("failed to delete recipients related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete webhooks + affectedWebhooks, err := s.WebhookService.GetByCompanyID( + g, + session, + companyID, + ) + if err != nil { + s.Logger.Errorw( + "failed get webhooks that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, webhook := range affectedWebhooks { + recpID := webhook.ID.MustGet() + err = s.WebhookService.DeleteByID( + g, + session, + &recpID, + ) + if err != nil { + s.Logger.Errorw("failed to delete webhooks related to company", "error", err) + return 0, errs.Wrap(err) + } + } + + // delete allow deny + affectedAllowDenies, err := s.AllowDenyService.GetByCompanyID( + g, + session, + companyID, + ) + if err != nil { + s.Logger.Errorw( + "failed get allow denies that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, allowDeny := range affectedAllowDenies.Rows { + recpID := allowDeny.ID.MustGet() + err = s.AllowDenyService.DeleteByID( + g, + session, + &recpID, + ) + if err != nil { + s.Logger.Errorw("failed to delete allow denies related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete templates + affectedTemplates, err := s.CampaignTemplate.GetByCompanyID( + g, + session, + companyID, + &repository.CampaignTemplateOption{}, + ) + if err != nil { + s.Logger.Errorw( + "failed get campaign templates that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, campaignTemplate := range affectedTemplates.Rows { + cid := campaignTemplate.ID.MustGet() + err = s.CampaignTemplate.DeleteByID( + g, + session, + &cid, + ) + if err != nil { + s.Logger.Errorw("failed to delete campaign template related to company", "error", err) + return 0, errs.Wrap(err) + } + } + // delete campaigns + affectedCampaigns, err := s.CampaignService.GetByCompanyID( + g, + session, + companyID, + &repository.CampaignOption{}, + ) + if err != nil { + s.Logger.Errorw( + "failed get 
campaigns that should be deleted due to company deletion", + "error", err, + ) + return 0, errs.Wrap(err) + } + for _, campaigns := range affectedCampaigns.Rows { + cid := campaigns.ID.MustGet() + err = s.CampaignService.DeleteByID( + g, + session, + &cid, + ) + if err != nil { + s.Logger.Errorw("failed to delete campaigns related to company", "error", err) + return 0, errs.Wrap(err) + } + } + + // finally delete the company + affectedRows, err := s.CompanyRepository.DeleteByID( + g, + companyID, + ) + if err != nil { + s.Logger.Errorw("failed to delete company by id", "error", err) + return affectedRows, errs.Wrap(err) + } + if affectedRows == 0 { + return affectedRows, nil + } + s.AuditLogAuthorized(ae) + return affectedRows, nil +} diff --git a/backend/service/domain.go b/backend/service/domain.go new file mode 100644 index 0000000..6954fc1 --- /dev/null +++ b/backend/service/domain.go @@ -0,0 +1,674 @@ +package service + +import ( + "bytes" + "context" + "crypto/tls" + "fmt" + "net" + "net/http" + "time" + + "github.com/go-errors/errors" + + "github.com/caddyserver/certmagic" + securejoin "github.com/cyphar/filepath-securejoin" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// Domain is a Domain service +type Domain struct { + Common + OwnManagedCertificatePath string + CertMagicConfig *certmagic.Config + CertMagicCache *certmagic.Cache + DomainRepository *repository.Domain + CompanyRepository *repository.Company + CampaignTemplateService *CampaignTemplate + AssetService *Asset + FileService *File +} + +// Create creates a new domain +func (d *Domain) Create( + ctx context.Context, + session *model.Session, + domain *model.Domain, +) (*uuid.UUID, error) { + ae := NewAuditEvent("Domain.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // validate data + if err := domain.Validate(); err != nil { + // d.Logger.Debugf("failed to validate domain", "error", err) + return nil, errs.Wrap(err) + } + // check for uniqueness + name := domain.Name.MustGet() // safe as we have validated + _, err = d.DomainRepository.GetByName( + ctx, + &name, + &repository.DomainOption{}, + ) + // we expect not to find a domain with this name + if err != nil { + // something went wrong + if !errors.Is(err, gorm.ErrRecordNotFound) { + d.Logger.Errorw("failed to create domain", "error", err) + return nil, errs.Wrap(err) + } + } + // if there is no error, it means we found a domain with this name + if err == nil { + d.Logger.Debugw("domain name is already taken", "error", name.String()) + return nil, validate.WrapErrorWithField(errors.New("not unique"), "name") + } + domain, err = d.handleOwnManagedTLS(ctx, domain) + if err != nil { + return nil, errs.Wrap(err) + } + // create domain + createdDomainID, err := d.DomainRepository.Insert( + ctx, + domain, + ) + if err != nil { + d.Logger.Errorw("failed to create domain", "error", err) + return nil, errs.Wrap(err) + } + 
ae.Details["id"] = createdDomainID.String() + d.AuditLogAuthorized(ae) + if domain.ManagedTLS.MustGet() && build.Flags.Production { + d.Logger.Debugw("triggering certificate retrieval", "domain", name.String()) + d.triggerCertificateRetrieval(name.String()) + } + return createdDomainID, nil +} + +// triggerCertificateRetrieval attempts to trigger automatic certificate +// by making an HTTPS request to the domain +func (d *Domain) triggerCertificateRetrieval(name string) { + go func() { + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + transport := &http.Transport{ + TLSClientConfig: &tls.Config{ + // #nosec + InsecureSkipVerify: true, // since cert won't be valid yet + }, + // Set reasonable timeouts + DialContext: (&net.Dialer{ + Timeout: 10 * time.Second, + KeepAlive: 5 * time.Second, + }).DialContext, + TLSHandshakeTimeout: 10 * time.Second, + ResponseHeaderTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, + IdleConnTimeout: 5 * time.Second, + // disable connection pooling since we only need one request + DisableKeepAlives: true, + MaxIdleConns: -1, + } + + client := &http.Client{ + Transport: transport, + // don't need client timeout as we're using context + } + + req, err := http.NewRequestWithContext(ctx, "GET", "https://"+name, nil) + if err != nil { + d.Logger.Errorw("failed to create request", + "domain", name, + "error", err) + return + } + + resp, err := client.Do(req) + if err != nil { + d.Logger.Errorw("failed to trigger certificate retrieval", + "domain", name, + "error", err) + return + } + // always close response body + if resp != nil && resp.Body != nil { + defer resp.Body.Close() + } + + // clean up transport + transport.CloseIdleConnections() + + d.Logger.Debugw("certificate retrieval triggered", + "domain", name, + "status", resp.StatusCode) + }() +} + +// GetAll gets domains +func (d *Domain) GetAll( + companyID *uuid.UUID, // can be null + ctx context.Context, + session *model.Session, + queryArgs *vo.QueryArgs, + withCompany bool, +) (*model.Result[model.Domain], error) { + result := model.NewEmptyResult[model.Domain]() + ae := NewAuditEvent("Domain.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get domains + result, err = d.DomainRepository.GetAll( + ctx, + companyID, + &repository.DomainOption{ + QueryArgs: queryArgs, + WithCompany: withCompany, + }, + ) + if err != nil { + d.Logger.Errorw("failed to get domains", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetByName gets a domain by name +func (d *Domain) GetByName( + ctx context.Context, + session *model.Session, + name *vo.String255, + options *repository.DomainOption, +) (*model.Domain, error) { + ae := NewAuditEvent("Domain.GetByName", session) + ae.Details["name"] = name.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get domain + domain, err := d.DomainRepository.GetByName( + ctx, + name, + options, + ) + if 
errors.Is(err, gorm.ErrRecordNotFound) { + // return early, this is not an error + return nil, errs.Wrap(err) + } + if err != nil { + d.Logger.Errorw("failed to get domain by name", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return domain, nil +} + +// GetAllOverview gets domains with limited data +func (d *Domain) GetAllOverview( + companyID *uuid.UUID, // can be null + ctx context.Context, + session *model.Session, + queryArgs *vo.QueryArgs, +) (*model.Result[model.DomainOverview], error) { + result := model.NewEmptyResult[model.DomainOverview]() + ae := NewAuditEvent("Domain.GetAllOverview", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get domains + result, err = d.DomainRepository.GetAllSubset( + ctx, + companyID, + &repository.DomainOption{ + QueryArgs: queryArgs, + }, + ) + if err != nil { + d.Logger.Errorw("failed to get domains subset", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetByID is a function to get domain by id +func (d *Domain) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.DomainOption, +) (*model.Domain, error) { + ae := NewAuditEvent("Domain.GetByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get domain + domain, err := d.DomainRepository.GetByID( + ctx, + id, + options, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early, this is not an error + return nil, errs.Wrap(err) + } + if err != nil { + d.Logger.Errorw("failed to get domain by id", "error", err) + return nil, errs.Wrap(err) + } + return domain, nil +} + +// GetByCompanyID is a function to get domain by company id +func (d *Domain) GetByCompanyID( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.DomainOption, +) (*model.Result[model.Domain], error) { + result := model.NewEmptyResult[model.Domain]() + ae := NewAuditEvent("Domain.GetByCompanyID", session) + ae.Details["companyId"] = companyID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get domains + result, err = d.DomainRepository.GetAllByCompanyID( + ctx, + companyID, + options, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early, this is not an error + return result, errs.Wrap(err) + } + if err != nil { + d.Logger.Errorw("failed to get domain by company id", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// UpdateByID updates domain by id +func (d *Domain) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.Domain, +) error { + ae := NewAuditEvent("Domain.UpdateByID", 
session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return err + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get the domain that is being updated + current, err := d.DomainRepository.GetByID( + ctx, + id, + &repository.DomainOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + d.Logger.Debugw("domain not found", "error", err) + return err + } + if err != nil { + d.Logger.Errorw("failed to update domain", "error", err) + return err + } + // set the supplied field on the existing domain + wasManagedTLS := current.ManagedTLS.MustGet() + if v, err := incoming.ManagedTLS.Get(); err == nil { + current.ManagedTLS.Set(v) + } + wasOwnManagedTLS := current.OwnManagedTLS.MustGet() + ownManagedTLSIsSet := false + if v, err := incoming.OwnManagedTLS.Get(); err == nil { + current.OwnManagedTLS.Set(v) + ownManagedTLSIsSet = v + } + ownManagedTLSKeyIsSet := false + if v, err := incoming.OwnManagedTLSKey.Get(); err == nil { + current.OwnManagedTLSKey.Set(v) + ownManagedTLSKeyIsSet = len(v) > 0 + } + ownManagedTLSPemIsSet := false + if v, err := incoming.OwnManagedTLSPem.Get(); err == nil { + current.OwnManagedTLSPem.Set(v) + ownManagedTLSPemIsSet = len(v) > 0 + } + if v, err := incoming.HostWebsite.Get(); err == nil { + current.HostWebsite.Set(v) + } + if v, err := incoming.PageContent.Get(); err == nil { + current.PageContent.Set(v) + } + if v, err := incoming.PageNotFoundContent.Get(); err == nil { + current.PageNotFoundContent.Set(v) + } + if v, err := incoming.RedirectURL.Get(); err == nil { + current.RedirectURL.Set(v) + } + // validate + if err := current.Validate(); err != nil { + d.Logger.Errorw("failed to validate domain", "error", err) + return err + } + // clean up if TLS was previous managed but no longer is + if managedTLS, err := incoming.ManagedTLS.Get(); err == nil && !managedTLS { + if wasManagedTLS { + d.removeManagedDomainTLS(ctx, current.Name.MustGet().String()) + } + } + // if previously was own managed but not anymore, remove the certs and cache + if wasOwnManagedTLS && !ownManagedTLSIsSet { + err = d.removeOwnManagedTLS(current) + if err != nil { + d.Logger.Warnf("failed to remove own managed TLS", "error", err) + } + } + // if previously own managed TLS and now is own managed + if !wasOwnManagedTLS && ownManagedTLSIsSet { + if ownManagedTLSKeyIsSet && ownManagedTLSPemIsSet { + current, err = d.handleOwnManagedTLS(ctx, current) + if err != nil { + return fmt.Errorf("faile to handle own managed TLS: %s", err) + } + } else { + return errs.NewValidationError( + errors.New("Private key and certificate must be provided for own managed TLS"), + ) + } + } + // if previously was own managed TLS + if wasOwnManagedTLS && ownManagedTLSIsSet { + // only if both a key and a certificate is provided, overwrite it + if ownManagedTLSKeyIsSet && ownManagedTLSPemIsSet { + current, err = d.handleOwnManagedTLS(ctx, current) + if err != nil { + return fmt.Errorf("faile to handle own managed TLS: %s", err) + } + } + } + // when updating, the own managed tls can previous be set with uploaded + // key and cert, so only if all of them are provided, we handle them + // update domain + err = d.DomainRepository.UpdateByID( + ctx, + current, + ) + if err != nil { + d.Logger.Errorw("failed to update domain by id", "error", err) + return err + } + d.AuditLogAuthorized(ae) + 
return nil +} + +// DeleteByID +func (d *Domain) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Domain.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return err + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get the domain + domain, err := d.DomainRepository.GetByID( + ctx, + id, + &repository.DomainOption{}, + ) + if err != nil { + return err + } + // delete the relation from the campaign templates + err = d.CampaignTemplateService.RemoveDomainByDomainID( + ctx, + session, + id, + ) + if err != nil { + d.Logger.Error("failed to remove domain relation from campaign templates") + return err + } + // delete all asset related to the domain + err = d.AssetService.DeleteAllByDomainID( + ctx, + session, + id, + ) + if err != nil { + d.Logger.Errorw("failed to delete assets related to domain", "error", err) + return err + } + err = d.DomainRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + d.Logger.Errorw("failed to delete domain by id", "error", err) + return err + } + // clean up if TLS was managed + if domain.ManagedTLS.MustGet() { + d.removeManagedDomainTLS(ctx, domain.Name.MustGet().String()) + } + d.AuditLogAuthorized(ae) + return nil +} + +// removeManagedDomainTLS +func (d *Domain) removeManagedDomainTLS(ctx context.Context, domain string) { + issuerKey := certmagic.DefaultACME.IssuerKey() + // check if managed certs exists + sitePrefix := certmagic.StorageKeys.CertsSitePrefix(issuerKey, domain) + if !d.CertMagicConfig.Storage.Exists(ctx, sitePrefix) { + d.Logger.Debugw("cache storage does not exist for", "error", sitePrefix) + return + } + // remove pem + certPath := certmagic.StorageKeys.SiteCert(issuerKey, domain) + err := d.CertMagicConfig.Storage.Delete(ctx, certPath) + if err != nil { + d.Logger.Debugw("attempted to remove managed TLS cert pem", "error", err) + } else { + d.Logger.Debugw("removed managed TLS cert pem", "error", certPath) + } + // remove .key + certKey := certmagic.StorageKeys.SitePrivateKey(issuerKey, domain) + err = d.CertMagicConfig.Storage.Delete(ctx, certKey) + if err != nil { + d.Logger.Debugw("attempted to remove managed TLS cert key", "error", err) + } else { + d.Logger.Debugw("removed managed TLS cert key", "error", certKey) + } + // remove .json info file + certMeta := certmagic.StorageKeys.SiteMeta(issuerKey, domain) + err = d.CertMagicConfig.Storage.Delete(ctx, certMeta) + if err != nil { + d.Logger.Debugw("attempted to remove managed TLS cert meta", "error", err) + } else { + d.Logger.Debugw("removed managed TLS cert meta", "error", certMeta) + } + // remove domain cert folder + err = d.CertMagicConfig.Storage.Delete(ctx, sitePrefix) + if err != nil { + d.Logger.Debugw("attempted to remove managed TLS cert folder", "error", err) + } else { + d.Logger.Debugw("removed managed TLS folder", "error", sitePrefix) + } + // remove from certmagic cache + certs := d.CertMagicCache.AllMatchingCertificates(domain) + for _, cert := range certs { + d.CertMagicCache.Remove([]string{cert.Hash()}) + d.Logger.Debugw("removed cached TLS", + "domain", domain, + "hash", cert.Hash(), + ) + } +} + +func (d *Domain) handleOwnManagedTLS( + ctx context.Context, + domain *model.Domain) (*model.Domain, error) { + name := domain.Name.MustGet().String() + // if the 
domain is set with self managed TLS + // upload the pem and key + key, _ := domain.OwnManagedTLSKey.Get() + pem, _ := domain.OwnManagedTLSPem.Get() + if len(key) > 0 && len(pem) > 0 { + keyBuffer := bytes.NewBufferString(key) + pemBuffer := bytes.NewBufferString(pem) + path, err := securejoin.SecureJoin(d.OwnManagedCertificatePath, name) + if err != nil { + return nil, fmt.Errorf("failed to join cert path and domain name: %s", err) + } + err = d.FileService.UploadFile( + ctx, + path+"/cert.key", + keyBuffer, + true, + ) + if err != nil { + d.Logger.Errorw( + "failed to upload TLS private key (.key)", + "error", err, + ) + return nil, errs.Wrap(err) + } + err = d.FileService.UploadFile( + ctx, + path+"/cert.pem", + pemBuffer, + true, + ) + if err != nil { + d.Logger.Errorw( + "failed to upload TLS certificate (.pem)", + "error", err, + ) + return nil, errs.Wrap(err) + } + keyBuffer = bytes.NewBufferString(key) + pemBuffer = bytes.NewBufferString(pem) + hash, err := d.CertMagicConfig.CacheUnmanagedCertificatePEMBytes( + ctx, + pemBuffer.Bytes(), + keyBuffer.Bytes(), + []string{}, + ) + if err != nil { + d.Logger.Errorw( + "failed to cache unmanaged cert for", name, + "error", err, + ) + return nil, errs.Wrap(err) + } + d.Logger.Debugw("Cached own managed TLS", + "domain", name, + "hash", hash, + ) + domain.OwnManagedTLS = nullable.NewNullableWithValue(true) + domain.ManagedTLS = nullable.NewNullableWithValue(false) + } else { + domain.OwnManagedTLS = nullable.NewNullableWithValue(false) + } + return domain, nil +} + +func (d *Domain) removeOwnManagedTLS( + domain *model.Domain, +) error { + name := domain.Name.MustGet().String() + path, err := securejoin.SecureJoin(d.OwnManagedCertificatePath, name) + if err != nil { + return fmt.Errorf("failed to delete own managed TLS for '%s' as: %s", name, err) + } + err = d.FileService.DeleteAll(path) + if err != nil { + return fmt.Errorf("failed to delete own managed TLS for '%s' as: %s", name, err) + } + d.Logger.Debugw("removed storage for own managed TLS", "name", name) + certs := d.CertMagicCache.AllMatchingCertificates(name) + for _, cert := range certs { + d.CertMagicCache.Remove([]string{cert.Hash()}) + d.Logger.Debugw("removed cached TLS", + "domain", name, + "hash", cert.Hash(), + ) + } + return nil +} diff --git a/backend/service/email.go b/backend/service/email.go new file mode 100644 index 0000000..a2139d8 --- /dev/null +++ b/backend/service/email.go @@ -0,0 +1,850 @@ +package service + +import ( + "context" + "crypto/tls" + "fmt" + "html/template" + "os" + "path/filepath" + "strings" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "github.com/wneessen/go-mail" + "gorm.io/gorm" +) + +// Email is a Email service +type Email struct { + Common + EmailRepository *repository.Email + SMTPService *SMTPConfiguration + DomainService *Domain + RecipientService *Recipient + TemplateService *Template + AttachmentService *Attachment + AttachmentPath string +} + +// AddAttachments adds an attachments to a message +func (m *Email) AddAttachments( + ctx context.Context, + session *model.Session, + messageID *uuid.UUID, + attachmentIDs []*uuid.UUID, +) error { + ae := 
NewAuditEvent("Email.AddAttachments", session) + ae.Details["messageId"] = messageID.String() + + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.Logger.Errorw("failed to get email id", "error", err) + return errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // TODO check if the user is privliged for the message + _, err = m.EmailRepository.GetByID( + ctx, + messageID, + &repository.EmailOption{}, + ) + if err != nil { + m.Logger.Errorw("failed to add attachment to email", "error", err) + return errs.Wrap(err) + } + // add attachment to message + attachmentIdsStr := []string{} + for _, attachmentID := range attachmentIDs { + attachmentIdsStr = append(attachmentIdsStr, attachmentID.String()) + // get the message to ensure it exists and the user is privliged + _, err = m.AttachmentService.GetByID( + ctx, + session, + attachmentID, + ) + if err != nil { + m.Logger.Errorw("failed to add attachment to email", "error", err) + return errs.Wrap(err) + } + err = m.EmailRepository.AddAttachment( + ctx, + messageID, + attachmentID, + ) + if err != nil { + m.Logger.Errorw("failed to add attachment to email", "error", err) + return errs.Wrap(err) + } + } + ae.Details["attachmentIds"] = attachmentIdsStr + m.AuditLogAuthorized(ae) + return nil +} + +// RemoveAttachment removes an attachment from a email +func (m *Email) RemoveAttachment( + ctx context.Context, + session *model.Session, + emailID *uuid.UUID, + attachmentID *uuid.UUID, +) error { + ae := NewAuditEvent("Email.RemoveAttachment", session) + ae.Details["emailId"] = emailID.String() + ae.Details["attachmentId"] = attachmentID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // TODO check if the user is privliged for the email + _, err = m.EmailRepository.GetByID( + ctx, + emailID, + &repository.EmailOption{}, + ) + if err != nil { + m.Logger.Errorw("failed to remove attachment from email", "error", err) + return errs.Wrap(err) + } + // get the email to ensure it exists and the user is privliged + _, err = m.EmailRepository.GetByID( + ctx, + emailID, + &repository.EmailOption{}, + ) + if err != nil { + m.Logger.Errorw("failed to remove attachment from email", "error", err) + return errs.Wrap(err) + } + // remove attachment from email + err = m.EmailRepository.RemoveAttachment( + ctx, + emailID, + attachmentID, + ) + if err != nil { + m.Logger.Errorw("failed to remove attachment from email", "error", err) + return errs.Wrap(err) + } + m.AuditLogAuthorized(ae) + return nil +} + +// Create creates a new email +func (m *Email) Create( + ctx context.Context, + session *model.Session, + email *model.Email, +) (*uuid.UUID, error) { + ae := NewAuditEvent("Email.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + + return nil, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // validate + if err := email.Validate(); err != nil { + return nil, errs.Wrap(err) + } + // check uniqueness + var 
companyID *uuid.UUID + if cid, err := email.CompanyID.Get(); err == nil { + companyID = &cid + } + name := email.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + m.EmailRepository.DB, + "emails", + name.String(), + companyID, + nil, + ) + if err != nil { + m.Logger.Errorw("failed to create email", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + m.Logger.Debugw("email name is already taken", "error", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // handle tracking pixel + email, err = m.toggleTrackingPixel(email) + if err != nil { + return nil, errs.Wrap(err) + } + // create email + emailID, err := m.EmailRepository.Insert( + ctx, + email, + ) + if err != nil { + m.Logger.Errorw("failed to create email", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = emailID.String() + m.AuditLogAuthorized(ae) + + return emailID, nil +} + +func (m *Email) toggleTrackingPixel( + email *model.Email, +) (*model.Email, error) { + // add tracking pixel + addTrackingPixel, err := email.AddTrackingPixel.Get() + if err != nil { + return email, nil + } + c, err := email.Content.Get() + if err != nil { + return nil, errs.Wrap(err) + } + var tmp string + if !addTrackingPixel { + tmp = m.TemplateService.RemoveTrackingPixelFromContent(c.String()) + } else { + tmp = m.TemplateService.AddTrackingPixel(c.String()) + } + b, err := vo.NewOptionalString1MB(tmp) + if err != nil { + return nil, errs.Wrap(err) + } + email.Content.Set(*b) + return email, nil +} + +// GetAll gets all emails by pagination with optional company id +func (m *Email) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + queryArgs *vo.QueryArgs, +) (*model.Result[model.Email], error) { + result := model.NewEmptyResult[model.Email]() + ae := NewAuditEvent("Email.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get all emails + emails, err := m.EmailRepository.GetAll( + ctx, + companyID, + &repository.EmailOption{ + QueryArgs: queryArgs, + }, + ) + if err != nil { + m.Logger.Errorw("failed to get emails", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return emails, nil +} + +// GetOverviews gets all email overviews +func (m *Email) GetOverviews( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + queryArgs *vo.QueryArgs, +) (*model.Result[model.Email], error) { + result := model.NewEmptyResult[model.Email]() + ae := NewAuditEvent("Email.GetOverviews", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get all emails + result, err = m.EmailRepository.GetOverviews( + ctx, + companyID, + &repository.EmailOption{ + QueryArgs: queryArgs, + }, + ) + if err != nil { + m.Logger.Errorw("failed to get emails", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetByID gets a email by id 
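+// The email is loaded together with its attachments (the repository is queried with
+// WithAttachments set).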
+func (m *Email) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) (*model.Email, error) { + ae := NewAuditEvent("Email.GetByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get email by id + email, err := m.EmailRepository.GetByID( + ctx, + id, + &repository.EmailOption{ + WithAttachments: true, + }, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early this is not an error + return nil, errs.Wrap(err) + } + if err != nil { + m.Logger.Errorw("failed to get email by id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return email, nil +} + +// GetByCompanyID gets a emails by company id +func (m *Email) GetByCompanyID( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, +) (*model.Result[model.Email], error) { + result := model.NewEmptyResult[model.Email]() + ae := NewAuditEvent("Email.GetByCompanyID", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get emails by id + result, err = m.EmailRepository.GetAllByCompanyID( + ctx, + companyID, + &repository.EmailOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early this is not an error + return result, errs.Wrap(err) + } + if err != nil { + m.Logger.Errorw("failed to get email by id", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// SendTestEmail sends a test email +func (m *Email) SendTestEmail( + ctx context.Context, + session *model.Session, + emailID *uuid.UUID, + smtpID *uuid.UUID, + domainID *uuid.UUID, + recpID *uuid.UUID, +) error { + ae := NewAuditEvent("Email.SendTestEmail", session) + ae.Details["emailId"] = emailID.String() + ae.Details["smtpId"] = smtpID.String() + ae.Details["recipientId"] = recpID.String() + ae.Details["domainID"] = domainID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get recipient by id + recipient, err := m.RecipientService.GetByID( + ctx, + session, + recpID, + &repository.RecipientOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Infow("failed to send test email - recipient not found", + "recipientID", recpID.String(), + ) + return errs.Wrap(err) + } + // get smtp by id + smtp, err := m.SMTPService.GetByID(ctx, session, smtpID, &repository.SMTPConfigurationOption{}) + if errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Infow("failed to send test email - stmp not found", + "SMTPID", smtpID.String(), + ) + return errs.Wrap(err) + } + if err != nil { + m.Logger.Errorw("failed to send test email, smtp not found", "error", err) + return err + } + // get domain by id + testDomain, err := 
m.DomainService.GetByID(ctx, session, domainID, &repository.DomainOption{}) + if errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Infow("failed to send test email - domain not found", + "DomainID", domainID.String(), + ) + return errs.Wrap(err) + } + if err != nil { + m.Logger.Errorw("failed to send test email, domain not found", "error", err) + return err + } + // get email by id + email, err := m.EmailRepository.GetByID( + ctx, + emailID, + &repository.EmailOption{ + WithAttachments: true, + }, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Infow("failed to send test email - email not found", + "emailID", emailID.String(), + ) + return errs.Wrap(err) + } + if err != nil { + m.Logger.Errorw("failed to send test email - email not found", "error", err) + return errs.Wrap(err) + } + campaignRecipient := &model.CampaignRecipient{ + ID: nullable.NewNullableWithValue(uuid.New()), + Recipient: recipient, + } + smtpPort, err := smtp.Port.Get() + if err != nil { + m.Logger.Errorw("failed to get smtp port", "error", err) + return errs.Wrap(err) + } + smtpHost, err := smtp.Host.Get() + if err != nil { + m.Logger.Errorw("failed to get smtp host", "error", err) + return errs.Wrap(err) + } + smtpIgnoreCertErrors, err := smtp.IgnoreCertErrors.Get() + if err != nil { + m.Logger.Errorw("failed to get smtp ignore cert errors", "error", err) + return errs.Wrap(err) + } + emailOptions := []mail.Option{ + mail.WithPort(smtpPort.Int()), + mail.WithTLSConfig( + &tls.Config{ + ServerName: smtpHost.String(), + // #nosec + InsecureSkipVerify: smtpIgnoreCertErrors, + // MinVersion: tls.VersionTLS12, + + }, + ), + } + // setup authentication if provided + username, err := smtp.Username.Get() + if err != nil { + m.Logger.Errorw("failed to get smtp username", "error", err) + return errs.Wrap(err) + } + password, err := smtp.Password.Get() + if err != nil { + m.Logger.Errorw("failed to get smtp password", "error", err) + return errs.Wrap(err) + } + if un := username.String(); len(un) > 0 { + emailOptions = append( + emailOptions, + mail.WithUsername( + un, + ), + ) + if pw := password.String(); len(pw) > 0 { + emailOptions = append( + emailOptions, + mail.WithPassword( + pw, + ), + ) + } + } + // prepare message + messageOptions := []mail.MsgOption{ + mail.WithNoDefaultUserAgent(), + } + msg := mail.NewMsg(messageOptions...) 
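+ // The message carries two distinct sender values (the addresses below are
+ // illustrative examples only, not values used by the code):
+ //   - envelope from: the SMTP "MAIL FROM" address used during the SMTP
+ //     transaction, e.g. bounces@example.test
+ //   - header from:   the "From:" header shown to the recipient,
+ //     e.g. "IT Support <it@example.test>"
+ // Both are read from the stored email template below.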
+ err = msg.EnvelopeFrom(email.MailEnvelopeFrom.MustGet().String()) + if err != nil { + m.Logger.Errorw("failed to set envelope from", "error", err) + return errs.Wrap(err) + } + // headers + err = msg.From(email.MailHeaderFrom.MustGet().String()) + if err != nil { + m.Logger.Errorw("failed to set mail header 'From'", "error", err) + return errs.Wrap(err) + } + recpEmail := campaignRecipient.Recipient.Email.MustGet().String() + err = msg.To(recpEmail) + if err != nil { + m.Logger.Errorw("failed to set mail header 'To'", "error", err) + return errs.Wrap(err) + } + // custom headers + if headers := smtp.Headers; headers != nil { + for _, header := range headers { + key := header.Key.MustGet() + value := header.Value.MustGet() + msg.SetGenHeader( + mail.Header(key.String()), + value.String(), + ) + } + } + msg.Subject(email.MailHeaderSubject.MustGet().String()) + domainName, err := testDomain.Name.Get() + if err != nil { + m.Logger.Errorw("failed to get domain name", "error", err) + return errs.Wrap(err) + } + + // create template + content, err := email.Content.Get() + if err != nil { + m.Logger.Errorw("failed to get message content", "error", err) + return errs.Wrap(err) + } + + mailTmpl, err := template. + New("email"). + Funcs(TemplateFuncs()). + Parse(content.String()) + + if err != nil { + m.Logger.Errorw("failed to parse email template", "error", err) + return errs.Wrap(err) + } + t := m.TemplateService.CreateMail( + domainName.String(), + "id", + "/", + campaignRecipient, + email, + nil, + ) + err = msg.SetBodyHTMLTemplate(mailTmpl, t) + if err != nil { + m.Logger.Errorw("failed to set body html template", "error", err) + return errs.Wrap(err) + } + // attachments + attachments := email.Attachments + for _, attachment := range attachments { + p, err := m.AttachmentService.GetPath(attachment) + if err != nil { + return fmt.Errorf("failed to get attachment path: %s", err) + } + if !attachment.EmbeddedContent.MustGet() { + msg.AttachFile(p.String()) + } else { + attachmentContent, err := os.ReadFile(p.String()) + if err != nil { + return errs.Wrap(err) + } + // hacky setup of attachment for executing as email template + attachmentAsEmail := model.Email{ + ID: email.ID, + CreatedAt: email.CreatedAt, + UpdatedAt: email.UpdatedAt, + Name: email.Name, + MailEnvelopeFrom: email.MailEnvelopeFrom, + MailHeaderFrom: email.MailHeaderFrom, + MailHeaderSubject: email.MailHeaderSubject, + Content: email.Content, + AddTrackingPixel: email.AddTrackingPixel, + CompanyID: email.CompanyID, + Attachments: email.Attachments, + Company: email.Company, + } + // really hacky / unsafe + attachmentAsEmail.Content = nullable.NewNullableWithValue( + *vo.NewUnsafeOptionalString1MB(string(attachmentContent)), + ) + attachmentStr, err := m.TemplateService.CreateMailBody( + "id", + "/", + testDomain, + campaignRecipient, + &attachmentAsEmail, + nil, + ) + if err != nil { + return fmt.Errorf("failed to setup attachment with embedded content: %s", err) + } + msg.AttachReadSeeker( + filepath.Base(p.String()), + strings.NewReader(attachmentStr), + ) + } + } + // the client sends all the messages and ensure that all messages are sent + // in the same connection + var mc *mail.Client + + // Try different authentication methods based on configuration + // If username is provided, use authentication; otherwise try without auth first + if un := username.String(); len(un) > 0 { + // Try CRAM-MD5 first when credentials are provided + emailOptionsCRAM5 := append(emailOptions, mail.WithSMTPAuth(mail.SMTPAuthCramMD5)) + mc, _ = 
mail.NewClient(smtpHost.String(), emailOptionsCRAM5...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(ctx, msg) + + // Check if it's an authentication error and try PLAIN auth + if err != nil && (strings.Contains(err.Error(), "535 ") || + strings.Contains(err.Error(), "534 ") || + strings.Contains(err.Error(), "538 ") || + strings.Contains(err.Error(), "CRAM-MD5") || + strings.Contains(err.Error(), "authentication failed")) { + m.Logger.Warnw("CRAM-MD5 authentication failed, trying PLAIN auth", "error", err) + emailOptionsBasic := emailOptions + if build.Flags.Production { + emailOptionsBasic = append(emailOptions, mail.WithSMTPAuth(mail.SMTPAuthPlain)) + } + mc, _ = mail.NewClient(smtpHost.String(), emailOptionsBasic...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(ctx, msg) + } + } else { + // No credentials provided, try without authentication (e.g., local postfix) + mc, _ = mail.NewClient(smtpHost.String(), emailOptions...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(ctx, msg) + + // If no-auth fails and we get an auth-related error, log it appropriately + if err != nil && (strings.Contains(err.Error(), "530 ") || + strings.Contains(err.Error(), "535 ") || + strings.Contains(err.Error(), "authentication required") || + strings.Contains(err.Error(), "AUTH")) { + m.Logger.Warnw("Server requires authentication but no credentials provided", "error", err) + } + } + if err != nil { + m.Logger.Errorw("failed to send test email", "error", err) + if msg.HasSendError() { + m.Logger.Errorw("failed to send test email", "error", msg.SendError()) + return msg.SendError() + } + return err + } + m.AuditLogAuthorized(ae) + return nil +} + +// UpdateByID updates a email by id +func (m *Email) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + email *model.Email, +) error { + ae := NewAuditEvent("Email.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return errs.Wrap(errs.ErrAuthorizationFailed) + } + // get current by id + current, err := m.EmailRepository.GetByID( + ctx, + id, + &repository.EmailOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Debugw("failed to update email by ID", "error", err) + return errs.Wrap(err) + } + if err != nil { + m.Logger.Errorw("failed to update email by ID", "error", err) + return errs.Wrap(err) + } + var companyID *uuid.UUID + if cid, err := email.CompanyID.Get(); err == nil { + companyID = &cid + } + // check uniqueness + name := email.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + m.EmailRepository.DB, + "emails", + name.String(), + companyID, + id, + ) + if err != nil { + m.Logger.Errorw("failed to create email", "error", err) + return errs.Wrap(err) + } + if !isOK { + m.Logger.Debugw("email name is already taken", "name", name.String()) + return validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // update email - if a field is present and not null, update it + if v, err := 
email.Name.Get(); err == nil { + current.Name.Set(v) + } + if v, err := email.MailEnvelopeFrom.Get(); err == nil { + current.MailEnvelopeFrom.Set(v) + } + if v, err := email.MailHeaderFrom.Get(); err == nil { + current.MailHeaderFrom.Set(v) + } + if v, err := email.MailHeaderSubject.Get(); err == nil { + current.MailHeaderSubject.Set(v) + } + if v, err := email.Content.Get(); err == nil { + if _, err := email.AddTrackingPixel.Get(); err == nil { + // handle tracking pixel + email, err = m.toggleTrackingPixel(email) + if err != nil { + return errs.Wrap(err) + } + current.Content.Set(email.Content.MustGet()) + } else { + current.Content.Set(v) + } + } + if v, err := email.AddTrackingPixel.Get(); err == nil { + current.AddTrackingPixel.Set(v) + } + if v, err := email.CompanyID.Get(); err == nil { + current.CompanyID.Set(v) + } + // validate change + if err := current.Validate(); err != nil { + m.Logger.Errorw("failed to update email by ID", "error", err) + return errs.Wrap(err) + } + // update email + err = m.EmailRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + m.Logger.Errorw("failed to update email by ID", "error", err) + return errs.Wrap(err) + } + m.AuditLogAuthorized(ae) + + return nil +} + +// DeleteByID deletes a email by id +func (m *Email) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Email.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // delete email by id + err = m.EmailRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + m.Logger.Errorw("failed to delete message by id", "error", err) + return errs.Wrap(err) + } + m.AuditLogAuthorized(ae) + + return nil +} diff --git a/backend/service/file.go b/backend/service/file.go new file mode 100644 index 0000000..8596b53 --- /dev/null +++ b/backend/service/file.go @@ -0,0 +1,237 @@ +package service + +import ( + "bytes" + "context" + "fmt" + "io/fs" + "mime/multipart" + "os" + "path/filepath" + "strings" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/validate" +) + +// FileUpload is a file upload +type FileUpload struct { + Path string + File *multipart.FileHeader +} + +// NewFileUpload creates a new file upload +func NewFileUpload(path string, file *multipart.FileHeader) *FileUpload { + return &FileUpload{ + Path: path, + File: file, + } +} + +// File is a File service +type File struct { + Common +} + +// checkFilePathIsValidForUpload checks if the file path is valid for upload +func (f *File) checkFilePathIsValidForUpload(path string) error { + parts := strings.Split(path, "/") + // Check each part of the path + for i := 1; i < len(parts); i++ { + partPath := strings.Join(parts[:i], "/") + info, err := os.Stat(partPath) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + // The path part does not exist, which is expected as we are still constructing the full path + continue + } else { + // Some other error occurred + return fmt.Errorf("failed to check asset (%s) path info: %w", partPath, err) + } + } + if !info.IsDir() { + return fmt.Errorf( + "part of the path is a file: '%s' of '%s'", + partPath, + path, + ) + } + } + 
return nil +} + +func (f *File) Upload( + g *gin.Context, + files []*FileUpload, +) (int, error) { + for _, fileUpload := range files { + path := fileUpload.Path + file := fileUpload.File + f.Logger.Debugw("checking if file exists", "path", path) + err := f.checkFilePathIsValidForUpload(path) + if err != nil && !errors.Is(err, errs.ErrValidationFailed) { + return 0, errs.Wrap(err) + } + if err != nil { + f.Logger.Errorw("failed to check file path for upload", "error", err) + return 0, errs.Wrap(err) + } + // check if the file exists + _, err = os.Stat(path) + pathDoesNotExists := errors.Is(err, fs.ErrNotExist) + if err != nil && !pathDoesNotExists { + f.Logger.Errorw("failed to get asset path info", "error", err) + return 0, errs.Wrap(err) + } + // a file or folder already exists + if !pathDoesNotExists { + filePathNotExistsMsg := fmt.Sprintf("file already exists at '%s'", path) + f.Logger.Debug(filePathNotExistsMsg) + return 0, validate.WrapErrorWithField( + errs.NewValidationError( + errors.New("a file already exists with that name"), + ), + "file", + ) + } + // Upload the file + err = g.SaveUploadedFile(file, path) + if err != nil { + f.Logger.Errorw("failed to save uploaded file", "error", err) + return 0, errs.Wrap(err) + } + f.Logger.Debugw("file uploaded", "error", path) + } + return len(files), nil +} + +func (f *File) UploadFile( + ctx context.Context, + path string, + contents *bytes.Buffer, + overwrite bool, +) error { + f.Logger.Debugw("checking if file exists", "path", path) + err := f.checkFilePathIsValidForUpload(path) + if err != nil && !errors.Is(err, errs.ErrValidationFailed) { + return err + } + if err != nil { + f.Logger.Errorw("failed to check file path for upload", "error", err) + return err + } + // check if the file exists + _, err = os.Stat(path) + pathDoesNotExists := errors.Is(err, fs.ErrNotExist) + if err != nil && !pathDoesNotExists { + f.Logger.Errorw("failed to get asset path info", "error", err) + return err + } + // a file or folder already exists + if !overwrite && !pathDoesNotExists { + filePathNotExistsMsg := fmt.Sprintf("file already exists at '%s'", path) + f.Logger.Debug(filePathNotExistsMsg) + return validate.WrapErrorWithField( + errs.NewValidationError( + errors.New("a file already exists with that name"), + ), + "file", + ) + } + // Create directories if they don't exist + dir := filepath.Dir(path) + if err := os.MkdirAll(dir, 0750); err != nil { + f.Logger.Errorw("failed to create directories", "error", err) + return err + } + if overwrite { + f.Logger.Debug("removing existing file...") + err = os.RemoveAll(path) + if err != nil { + f.Logger.Errorw("failed to remove existing file", "error", err) + return err + } + } + // #nosec + outFile, err := os.Create(path) + if err != nil { + f.Logger.Errorw("failed to create file", "error", err) + return err + } + + // #nosec + _, err = contents.WriteTo(outFile) + _ = outFile.Close() + if err != nil { + f.Logger.Errorw("failed to write file", "error", err) + return err + } + + f.Logger.Debugw("file uploaded", "path", path) + return nil +} + +// Delete deletes a file +func (f *File) Delete( + path string, +) error { + err := os.Remove(path) + if err != nil { + f.Logger.Errorw("failed to delete file", "error", err) + return err + } + return nil +} + +// DeleteAll deletes a file or folder recursively +func (f *File) DeleteAll( + path string, +) error { + err := os.RemoveAll(path) + if err != nil { + f.Logger.Errorw("failed to delete path", "error", err) + return err + } + return nil +} + +// 
RemoveEmptyFolderRecursively folders recursively deletes all empty folders +// until it hits an non-empty folder or the root +func (f *File) RemoveEmptyFolderRecursively( + rootPath string, + path string, +) error { + f.Logger.Debugw("Checking if empty folders should be removed, root: %s, path: %s", + "rootPath", rootPath, + "path", path, + ) + + // check if the path is the root + if path == rootPath { + f.Logger.Debug("path is the root, stopping recursion") + return nil + } + // check if the path is empty + entries, err := os.ReadDir(path) + if err != nil { + f.Logger.Errorw("failed to read directory", "error", err) + return nil + } + if len(entries) > 0 { + f.Logger.Debug("path is not empty, stopping recursion") + return nil + } + // delete the empty folder + f.Logger.Debugw("deleting empty folder", "path", path) + err = os.Remove(path) + if err != nil { + f.Logger.Errorw("failed to delete empty folder", "error", err) + return err + } + // check the parent folder + parent := filepath.Dir(path) + return f.RemoveEmptyFolderRecursively(rootPath, parent) +} diff --git a/backend/service/identifier.go b/backend/service/identifier.go new file mode 100644 index 0000000..a27c049 --- /dev/null +++ b/backend/service/identifier.go @@ -0,0 +1,44 @@ +package service + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" +) + +type Identifier struct { + Common + IdentifierRepository *repository.Identifier +} + +func (i *Identifier) GetAll( + ctx context.Context, + session *model.Session, + options *repository.IdentifierOption, +) (*model.Result[model.Identifier], error) { + result := model.NewEmptyResult[model.Identifier]() + ae := NewAuditEvent("Identifier.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + i.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + i.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get + result, err = i.IdentifierRepository.GetAll(ctx, options) + if err != nil { + i.Logger.Errorw("failed to get all identifiers", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return result, nil +} diff --git a/backend/service/import.go b/backend/service/import.go new file mode 100644 index 0000000..db7fc88 --- /dev/null +++ b/backend/service/import.go @@ -0,0 +1,774 @@ +package service + +import ( + "archive/zip" + "bytes" + "fmt" + "io" + "mime/multipart" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" + + securejoin "github.com/cyphar/filepath-securejoin" + "github.com/gin-gonic/gin" + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +const ( + MaxIndividualFileSize = 10 * 1024 * 1024 // 10MB per file + MaxTotalExtractedSize = 200 * 1024 * 1024 // 200MB total extracted + MaxFileCount = 20000 // Maximum files in zip + MaxCompressionRatio = 100 // Maximum compression ratio (100:1) +) + +// validateCompressionRatio checks if the compression ratio is within safe limits +func 
validateCompressionRatio(compressedSize, uncompressedSize int64) error { + if compressedSize == 0 { + return fmt.Errorf("invalid compressed size") + } + ratio := float64(uncompressedSize) / float64(compressedSize) + if ratio > MaxCompressionRatio { + return fmt.Errorf("compression ratio too high: %.2f (max: %d)", ratio, MaxCompressionRatio) + } + return nil +} + +// Import controller handles import from assets, emails, landing pages and etc. +type Import struct { + Common + File *File + Asset *Asset + Page *Page + Email *Email + EmailRepository *repository.Email + PageRepository *repository.Page +} + +// DataYAML represents the structure of data.yaml for templates +type EmailMeta struct { + Name string `yaml:"name"` + File string `yaml:"file"` + EnvelopeFrom string `yaml:"envelope from"` + From string `yaml:"from"` + Subject string `yaml:"subject"` +} + +type DataYAML struct { + Name string `yaml:"name"` + Pages []struct { + Name string `yaml:"name"` + File string `yaml:"file"` + } `yaml:"pages"` + Emails []EmailMeta `yaml:"emails"` +} + +// Import takes a file header and import from a zip file +type ImportSummary struct { + AssetsCreated int `json:"assets_created"` + AssetsCreatedList []string `json:"assets_created_list"` + AssetsSkipped int `json:"assets_skipped"` + AssetsSkippedList []string `json:"assets_skipped_list"` + AssetsErrors int `json:"assets_errors"` + AssetsErrorsList []ImportError `json:"assets_errors_list"` + + PagesCreated int `json:"pages_created"` + PagesCreatedList []string `json:"pages_created_list"` + PagesUpdated int `json:"pages_updated"` + PagesUpdatedList []string `json:"pages_updated_list"` + PagesSkipped int `json:"pages_skipped"` + PagesSkippedList []string `json:"pages_skipped_list"` + PagesErrors int `json:"pages_errors"` + PagesErrorsList []ImportError `json:"pages_errors_list"` + + EmailsCreated int `json:"emails_created"` + EmailsCreatedList []string `json:"emails_created_list"` + EmailsUpdated int `json:"emails_updated"` + EmailsUpdatedList []string `json:"emails_updated_list"` + EmailsSkipped int `json:"emails_skipped"` + EmailsSkippedList []string `json:"emails_skipped_list"` + EmailsErrors int `json:"emails_errors"` + EmailsErrorsList []ImportError `json:"emails_errors_list"` + + // unspecificed errors + Errors []ImportError `json:"errors"` +} + +func NewImportSummary() *ImportSummary { + return &ImportSummary{ + AssetsCreatedList: []string{}, + AssetsSkippedList: []string{}, + AssetsErrorsList: []ImportError{}, + + PagesCreatedList: []string{}, + PagesUpdatedList: []string{}, + PagesSkippedList: []string{}, + PagesErrorsList: []ImportError{}, + + EmailsCreatedList: []string{}, + EmailsUpdatedList: []string{}, + EmailsSkippedList: []string{}, + EmailsErrorsList: []ImportError{}, + + Errors: []ImportError{}, + } +} + +type ImportError struct { + Type string `json:"type"` // "asset", "page", "email", "data.yaml" + Name string `json:"name"` // file or template name + Message string `json:"message"` +} + +func (im *Import) Import( + g *gin.Context, + session *model.Session, + fileHeader *multipart.FileHeader, + forCompany bool, + companyID *uuid.UUID, +) (*ImportSummary, error) { + ae := NewAuditEvent("Import.Import", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + im.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + im.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // handle file 
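+ // The uploaded file is expected to be a zip archive. A rough, illustrative
+ // layout (folder and file names are examples, not required values):
+ //
+ //	assets/logo.png          -> imported as a shared asset
+ //	acme-reset/data.yaml     -> marks a template folder
+ //	acme-reset/email.html    -> referenced from data.yaml "emails"
+ //	acme-reset/landing.html  -> referenced from data.yaml "pages"
+ //
+ // where data.yaml follows the DataYAML structure declared above, e.g.:
+ //
+ //	name: Acme Reset
+ //	pages:
+ //	  - name: Acme landing
+ //	    file: landing.html
+ //	emails:
+ //	  - name: Acme reset mail
+ //	    file: email.html
+ //	    envelope from: bounce@example.test
+ //	    from: "Acme IT <it@example.test>"
+ //	    subject: Password reset required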
+ zipFile, err := fileHeader.Open() + if err != nil { + return nil, err + } + defer zipFile.Close() + + // Read all bytes from zipFile to allow random access + zipBytes, err := io.ReadAll(zipFile) + if err != nil { + return nil, err + } + readerAt := bytes.NewReader(zipBytes) + r, err := zip.NewReader(readerAt, int64(len(zipBytes))) + if err != nil { + return nil, errs.Wrap(err) + } + + // Validate zip file structure and prevent zip bombs + var totalUncompressedSize int64 + fileCount := 0 + for _, f := range r.File { + if f.FileInfo().IsDir() { + continue + } + + fileCount++ + if fileCount > MaxFileCount { + return nil, fmt.Errorf("zip contains too many files: %d (max: %d)", fileCount, MaxFileCount) + } + + if f.UncompressedSize64 > MaxIndividualFileSize { + return nil, fmt.Errorf("file %s is too large: %d bytes (max: %d)", f.Name, f.UncompressedSize64, MaxIndividualFileSize) + } + + totalUncompressedSize += int64(f.UncompressedSize64) + if totalUncompressedSize > MaxTotalExtractedSize { + return nil, fmt.Errorf("total extracted size too large: %d bytes (max: %d)", totalUncompressedSize, MaxTotalExtractedSize) + } + + if err := validateCompressionRatio(int64(f.CompressedSize64), int64(f.UncompressedSize64)); err != nil { + return nil, fmt.Errorf("file %s: %v", f.Name, err) + } + } + + summary := &ImportSummary{} + // 1. Collect all asset files from root-level "assets/" + var assetFiles []*zip.File + for _, f := range r.File { + if !f.FileInfo().IsDir() && strings.HasPrefix(f.Name, "assets/") { + assetFiles = append(assetFiles, f) + } + } + for _, assetFile := range assetFiles { + // Compute relative path inside assets/ + relPath := strings.TrimPrefix(assetFile.Name, "assets/") + // Check DB for asset existence + createdNew, err := im.createAssetFromZipFile(g, session, assetFile, relPath) + if err != nil { + summary.AssetsErrors++ + summary.AssetsErrorsList = append(summary.AssetsErrorsList, ImportError{ + Type: "asset", + Name: assetFile.Name, + Message: err.Error(), + }) + } else if createdNew { + summary.AssetsCreated++ + summary.AssetsCreatedList = append(summary.AssetsCreatedList, relPath) + } else { + summary.AssetsSkipped++ + summary.AssetsSkippedList = append(summary.AssetsSkippedList, relPath) + } + } + + // 2. Find all folders containing a data.yaml and process them as template folders + // Map: folder path -> *zip.File for data.yaml + templateFolders := make(map[string]*zip.File) + for _, f := range r.File { + if !f.FileInfo().IsDir() && strings.HasSuffix(f.Name, "data.yaml") { + dir := filepath.Dir(f.Name) + templateFolders[dir] = f + im.Logger.Debugw("Found template folder", "folder", dir, "dataYamlPath", f.Name) + } + } + im.Logger.Infow("Template folder discovery complete", "templateFolderCount", len(templateFolders)) + + // 3. 
Find all standalone template files without data.yaml + standaloneTemplates := make(map[string][]*zip.File) // folder -> list of HTML files + processedFolders := make(map[string]bool) + + // Mark folders with data.yaml as processed + for folder := range templateFolders { + processedFolders[folder] = true + } + + // Find standalone HTML files in folders without data.yaml + for _, f := range r.File { + if f.FileInfo().IsDir() { + continue + } + + fileName := filepath.Base(f.Name) + folder := filepath.Dir(f.Name) + + // Skip if this folder already has data.yaml or is assets folder + if processedFolders[folder] || strings.HasPrefix(f.Name, "assets/") { + continue + } + + // Look for common template file patterns + if strings.HasSuffix(fileName, ".html") && + (fileName == "landing.html" || fileName == "index.html" || + fileName == "email.html" || fileName == "landingpage.html") { + standaloneTemplates[folder] = append(standaloneTemplates[folder], f) + im.Logger.Debugw("Found standalone template file", "folder", folder, "file", fileName) + } + } + + im.Logger.Infow("Template discovery complete", "foldersWithDataYaml", len(templateFolders), "standaloneTemplateFolders", len(standaloneTemplates)) + + // 4. For each template folder, parse data.yaml and process pages/emails + // Helper for robust zip relative path calculation + zipRelPath := func(folder, name string) string { + cleanName := filepath.Clean(filepath.ToSlash(name)) + cleanFolder := filepath.Clean(filepath.ToSlash(folder)) + if cleanFolder == "." { + return cleanName + } + if !strings.HasPrefix(cleanName, cleanFolder+"/") { + return cleanName + } + return strings.TrimPrefix(cleanName, cleanFolder+"/") + } + + im.Logger.Infow("Starting template folder processing", "totalTemplateFolders", len(templateFolders), "totalZipFiles", len(r.File)) + + // Pre-build file indices for efficient lookup + buildFileIndex := func(folder string) map[string]*zip.File { + fileIndex := make(map[string]*zip.File) + folderPath := filepath.ToSlash(folder) + + for _, f := range r.File { + if f.FileInfo().IsDir() { + continue + } + zipPath := filepath.ToSlash(f.Name) + + // Check if file is in this template folder + if folderPath != "." 
&& !strings.HasPrefix(zipPath, folderPath+"/") { + continue + } + + relPath := zipRelPath(folder, f.Name) + cleanRelPath := filepath.Clean(filepath.ToSlash(relPath)) + + // Index by cleaned relative path + fileIndex[cleanRelPath] = f + // Also index by case-insensitive version for robustness + fileIndex[strings.ToLower(cleanRelPath)] = f + } + return fileIndex + } + + for folder, dataYamlFile := range templateFolders { + im.Logger.Infow("Processing template folder", "folder", folder, "dataYamlFile", dataYamlFile.Name) + + // Read data.yaml content + rc, err := dataYamlFile.Open() + if err != nil { + summary.Errors = append(summary.Errors, ImportError{ + Type: "data.yaml", + Name: dataYamlFile.Name, + Message: fmt.Sprintf("failed to open data.yaml in %s: %v", folder, err), + }) + continue + } + dataYamlContent, err := io.ReadAll(rc) + rc.Close() + if err != nil { + summary.Errors = append(summary.Errors, ImportError{ + Type: "data.yaml", + Name: dataYamlFile.Name, + Message: fmt.Sprintf("failed to read data.yaml in %s: %v", folder, err), + }) + continue + } + var dataYaml DataYAML + if err := yaml.Unmarshal(dataYamlContent, &dataYaml); err != nil { + summary.Errors = append(summary.Errors, ImportError{ + Type: "data.yaml", + Name: dataYamlFile.Name, + Message: fmt.Sprintf("failed to parse data.yaml in %s: %v", folder, err), + }) + continue + } + + im.Logger.Infow("Parsed data.yaml", "folder", folder, "name", dataYaml.Name, "pageCount", len(dataYaml.Pages), "emailCount", len(dataYaml.Emails)) + + // Build file index for this template folder + fileIndex := buildFileIndex(folder) + im.Logger.Debugw("Built file index for folder", "folder", folder, "fileCount", len(fileIndex)) + + // Build sets of page and email file relative paths (relative to the template folder) + pageFiles := make(map[string]struct{}) + for _, page := range dataYaml.Pages { + cleanPageFile := filepath.Clean(filepath.ToSlash(page.File)) + im.Logger.Debugw("Page file from yaml", "original", page.File, "cleaned", cleanPageFile) + pageFiles[cleanPageFile] = struct{}{} + } + emailFiles := make(map[string]struct{}) + for _, email := range dataYaml.Emails { + cleanEmailFile := filepath.Clean(filepath.ToSlash(email.File)) + im.Logger.Debugw("Email file from yaml", "original", email.File, "cleaned", cleanEmailFile) + emailFiles[cleanEmailFile] = struct{}{} + } + + // For each file in the zip, check if it's under this template folder (including subfolders) + for _, f := range r.File { + if f.FileInfo().IsDir() { + continue + } + zipPath := filepath.ToSlash(f.Name) + folderPath := filepath.ToSlash(folder) + if folderPath != "." 
&& !strings.HasPrefix(zipPath, folderPath+"/") { + continue + } + relPath := zipRelPath(folder, f.Name) + im.Logger.Debugw("Found file in template folder", "name", f.Name, "relPath", relPath) + // Skip data.yaml, page files, and email files by relative path + if relPath == "data.yaml" { + im.Logger.Debugw("Skipping data.yaml") + continue + } + if _, ok := pageFiles[relPath]; ok { + im.Logger.Debugw("Skipping page file", "relPath", relPath) + continue + } + if _, ok := emailFiles[relPath]; ok { + im.Logger.Debugw("Skipping emailfile", "relPath", relPath) + continue + } + im.Logger.Debugw("Processing as asset", "relPath", relPath) + // Upload as asset, using relPath as the asset path + created, err := im.createAssetFromZipFile(g, session, f, relPath) + if err != nil { + summary.AssetsErrors++ + summary.AssetsErrorsList = append(summary.AssetsErrorsList, ImportError{ + Type: "asset", + Name: f.Name, + Message: err.Error(), + }) + } else if created { + summary.AssetsCreated++ + summary.AssetsCreatedList = append(summary.AssetsCreatedList, relPath) + } else { + summary.AssetsSkipped++ + summary.AssetsSkippedList = append(summary.AssetsSkippedList, relPath) + } + } + + // PAGE IMPORT --- + for _, page := range dataYaml.Pages { + cleanPageFile := filepath.Clean(filepath.ToSlash(page.File)) + im.Logger.Debugw("Looking for page file", "pageName", page.Name, "pageFile", page.File, "cleanedFile", cleanPageFile, "folder", folder) + + // Use file index for efficient lookup + pageFile, pageFileFound := fileIndex[cleanPageFile] + if !pageFileFound { + pageFile, pageFileFound = fileIndex[strings.ToLower(cleanPageFile)] + if pageFileFound { + im.Logger.Debugw("Found page file with case-insensitive match", "pageName", page.Name, "file", pageFile.Name) + } + } + + if !pageFileFound { + im.Logger.Warnw("Page file not found", "pageName", page.Name, "expectedFile", cleanPageFile, "folder", folder, "availableFiles", len(fileIndex)) + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("page file %s not found in folder %s", page.File, folder), + }) + continue + } + + // Read HTML content + rc, err := pageFile.Open() + if err != nil { + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("failed to open page file: %v", err), + }) + continue + } + content, err := io.ReadAll(rc) + rc.Close() + if err != nil { + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("failed to read page file: %v", err), + }) + continue + } + + // Create new page + newPage := &model.Page{} + name, err := vo.NewString64(page.Name) + if err != nil { + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("failed to create page name: %v", err), + }) + continue + } + newPage.Name = nullable.NewNullableWithValue(*name) + if pageContent, err := vo.NewOptionalString1MB(string(content)); err == nil { + newPage.Content = nullable.NewNullableWithValue(*pageContent) + } + if forCompany && companyID != nil { + newPage.CompanyID = nullable.NewNullableWithValue(*companyID) + } + + // check if page already exists + existing, err := im.PageRepository.GetByNameAndCompanyID(g.Request.Context(), name, companyID, &repository.PageOption{}) + if err != nil && 
!errors.Is(err, gorm.ErrRecordNotFound) { + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("failed check for existing page: %v", err), + }) + continue + } + // determin if this is a update operation + isUpdate := existing != nil + if isUpdate { + if companyID == nil && existing.CompanyID.IsNull() { + // this is a update operation as both have no company ID + } else if companyID != nil && existing.CompanyID.IsNull() { + // if we are creating with a company id and the existing has none + // then this is actually a create + isUpdate = false + } else if companyID == nil && existing.CompanyID.IsSpecified() && !existing.CompanyID.IsNull() { + // if we have no company id but the existing asset has then this is a create operation + isUpdate = false + } else if companyID != nil && existing.CompanyID.IsSpecified() && !existing.CompanyID.IsNull() { + if companyID.String() != existing.CompanyID.MustGet().String() { + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("page '%s' belongs to another company", page.Name), + }) + continue + } + } + } + if isUpdate { + // update + existingID := existing.ID.MustGet() + err = im.Page.UpdateByID(g.Request.Context(), session, &existingID, newPage) + if err != nil { + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("failed to update page: %v", err), + }) + } else { + summary.PagesUpdated++ + summary.PagesUpdatedList = append(summary.PagesUpdatedList, page.Name) + } + } else { + // create + _, err = im.Page.Create(g.Request.Context(), session, newPage) + if err != nil { + summary.PagesErrors++ + summary.PagesErrorsList = append(summary.PagesErrorsList, ImportError{ + Type: "page", + Name: page.Name, + Message: fmt.Sprintf("failed to create page: %v", err), + }) + } else { + summary.PagesCreated++ + summary.PagesCreatedList = append(summary.PagesCreatedList, page.Name) + } + } + } + + // --- EMAIL IMPORT --- + for _, email := range dataYaml.Emails { + cleanEmailFile := filepath.Clean(filepath.ToSlash(email.File)) + im.Logger.Debugw("Looking for email file", "emailName", email.Name, "emailFile", email.File, "cleanedFile", cleanEmailFile, "folder", folder) + + // Use file index for efficient lookup + emailFile, emailFileFound := fileIndex[cleanEmailFile] + if !emailFileFound { + // Try case-insensitive lookup + emailFile, emailFileFound = fileIndex[strings.ToLower(cleanEmailFile)] + if emailFileFound { + im.Logger.Debugw("Found email file with case-insensitive match", "emailName", email.Name, "file", emailFile.Name) + } + } + + if !emailFileFound { + im.Logger.Warnw("Email file not found", "emailName", email.Name, "expectedFile", cleanEmailFile, "folder", folder, "availableFiles", len(fileIndex)) + summary.EmailsErrors++ + summary.EmailsErrorsList = append(summary.EmailsErrorsList, ImportError{ + Type: "email", + Name: email.Name, + Message: fmt.Sprintf("email file %s not found in folder %s", email.File, folder), + }) + continue + } + + // Read HTML content + rc, err := emailFile.Open() + if err != nil { + summary.EmailsErrors++ + summary.EmailsErrorsList = append(summary.EmailsErrorsList, ImportError{ + Type: "email", + Name: email.Name, + Message: fmt.Sprintf("failed to open email file: %v", err), + }) + continue + } + content, err := io.ReadAll(rc) + 
rc.Close() + if err != nil { + summary.EmailsErrors++ + summary.EmailsErrorsList = append(summary.EmailsErrorsList, ImportError{ + Type: "email", + Name: email.Name, + Message: fmt.Sprintf("failed to read email file: %v", err), + }) + continue + } + + // Create new email for this context + newEmail := &model.Email{} + if companyID != nil { + newEmail.CompanyID = nullable.NewNullableWithValue(*companyID) + } + if emailContent, err := vo.NewOptionalString1MB(string(content)); err == nil { + newEmail.Content = nullable.NewNullableWithValue(*emailContent) + } + name, err := vo.NewString64(email.Name) + if err == nil { + newEmail.Name = nullable.NewNullableWithValue(*name) + } + if from, err := vo.NewEmail(email.From); err == nil { + newEmail.MailHeaderFrom = nullable.NewNullableWithValue(*from) + } + if envelopeFrom, err := vo.NewMailEnvelopeFrom(email.EnvelopeFrom); err == nil { + newEmail.MailEnvelopeFrom = nullable.NewNullableWithValue(*envelopeFrom) + } + if subject, err := vo.NewOptionalString255(email.Subject); err == nil { + newEmail.MailHeaderSubject = nullable.NewNullableWithValue(*subject) + } + newEmail.AddTrackingPixel = nullable.NewNullableWithValue(true) + if forCompany && companyID != nil { + newEmail.CompanyID = nullable.NewNullableWithValue(*companyID) + } + // check if email already exists + existing, err := im.EmailRepository.GetByNameAndCompanyID(g, name, companyID, &repository.EmailOption{}) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + summary.EmailsErrors++ + summary.EmailsErrorsList = append(summary.EmailsErrorsList, ImportError{ + Type: "email", + Name: email.Name, + Message: fmt.Sprintf("failed check for existing email: %v", err), + }) + continue + } + // determin if this is a update operation + isUpdate := existing != nil + if isUpdate { + if companyID == nil && existing.CompanyID.IsNull() { + // this is a update operation as both have no company ID + } else if companyID != nil && existing.CompanyID.IsNull() { + // if we are creating with a company id and the existing has none + // then this is actually a create + isUpdate = false + } else if companyID == nil && existing.CompanyID.IsSpecified() && !existing.CompanyID.IsNull() { + // if we have no company id but the existing email has then this is a create operation + isUpdate = false + } else if companyID != nil && existing.CompanyID.IsSpecified() && !existing.CompanyID.IsNull() { + if companyID.String() != existing.CompanyID.MustGet().String() { + summary.EmailsErrors++ + summary.EmailsErrorsList = append(summary.EmailsErrorsList, ImportError{ + Type: "email", + Name: email.Name, + Message: fmt.Sprintf("email '%s' belongs to another company", email.Name), + }) + continue + } + } + } + if isUpdate { + // update + existingID := existing.ID.MustGet() + err = im.Email.UpdateByID(g, session, &existingID, newEmail) + if err != nil { + summary.EmailsErrors++ + summary.EmailsErrorsList = append(summary.EmailsErrorsList, ImportError{ + Type: "email", + Name: email.Name, + Message: fmt.Sprintf("failed to update email: %v", err), + }) + } else { + summary.EmailsUpdated++ + summary.EmailsUpdatedList = append(summary.EmailsUpdatedList, email.Name) + } + } else { + // create + _, err = im.Email.Create(g.Request.Context(), session, newEmail) + if err != nil { + summary.EmailsErrors++ + summary.EmailsErrorsList = append(summary.EmailsErrorsList, ImportError{ + Type: "email", + Name: email.Name, + Message: fmt.Sprintf("failed to create email: %v", err), + }) + } else { + summary.EmailsCreated++ + 
summary.EmailsCreatedList = append(summary.EmailsCreatedList, email.Name) + } + } + } + } + + return summary, nil +} + +// createAssetFromZipFile creates an Asset from a zip file entry and saves it directly +// returns true if a new asset was created +func (im *Import) createAssetFromZipFile( + g *gin.Context, + session *model.Session, + f *zip.File, + relativePath string, +) (bool, error) { + // Check if asset already exists by path + existing, err := im.Asset.GetByPath(g.Request.Context(), session, relativePath) + + if err == nil && existing != nil { + // Asset already exists - check if it has a company ID (if so, skip it) + if existing.CompanyID.IsSpecified() && !existing.CompanyID.IsNull() { + // Asset belongs to a company - skip it to maintain global-only policy + return false, nil + } + // Asset already exists and is global + return false, nil + } else if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return false, err + } + + // Open the file from zip + rc, err := f.Open() + if err != nil { + return false, err + } + defer rc.Close() + + // Read file content + content, err := io.ReadAll(rc) + if err != nil { + return false, err + } + + // Use the provided relativePath for the asset path + fullRelativePath := relativePath + + // Create asset model + asset := &model.Asset{} + + // Set the name from filename + filename := filepath.Base(f.Name) + if name, err := vo.NewOptionalString127(filename); err == nil { + asset.Name = nullable.NewNullableWithValue(*name) + } + + // Set the relative path + if path, err := vo.NewRelativeFilePath(fullRelativePath); err == nil { + asset.Path = nullable.NewNullableWithValue(*path) + } + + // Assets are always global/shared - never set company ID + + // Save asset to database first + id, err := im.Asset.AssetRepository.Insert(g, asset) + if err != nil { + return false, err + } + + // Build the file system path - assets are always stored in shared folder + contextFolder := "shared" + + // Build full file path + pathWithRootAndDomainContext, err := securejoin.SecureJoin(im.Asset.RootFolder, contextFolder) + if err != nil { + return false, err + } + pathWithRootAndDomainContext, err = securejoin.SecureJoin(pathWithRootAndDomainContext, fullRelativePath) + if err != nil { + return false, err + } + + // Upload the file content directly + contentBuffer := bytes.NewBuffer(content) + err = im.File.UploadFile(g, pathWithRootAndDomainContext, contentBuffer, true) + if err != nil { + // Clean up the database entry if file upload fails + im.Asset.AssetRepository.DeleteByID(g, id) + return false, err + } + + return true, nil +} diff --git a/backend/service/install.go b/backend/service/install.go new file mode 100644 index 0000000..4fd7469 --- /dev/null +++ b/backend/service/install.go @@ -0,0 +1,130 @@ +package service + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/password" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// InstallSetup sets up the intial administrator account +// this is called by the installation process and is not part of the normal application flow +// it must not use services that require authentication or be used by any other part of the application +type InstallSetup struct { + Common + UserRepository 
*repository.User + RoleRepository *repository.Role + CompanyRepository *repository.Company + PasswordHasher *password.Argon2Hasher +} + +// SetupAccounts sets up the accounts needed for the system to function +func (s *InstallSetup) SetupAccounts( + ctx context.Context, +) (*model.User, *vo.ReasonableLengthPassword, error) { + user, password, err := s.setupInitialAdministratorAccount(ctx) + if err != nil { + s.Logger.Errorw("failed to setup the initial administrator account", "error", err) + return nil, nil, errs.Wrap(err) + } + return user, password, nil +} + +// setupInitialAdministratorAccount sets up the initial administrator account +func (s *InstallSetup) setupInitialAdministratorAccount( + ctx context.Context, +) (*model.User, *vo.ReasonableLengthPassword, error) { + username := vo.NewUsernameMust(data.DefaultSacrificalAccountUsername) + nullableUsername := nullable.NewNullableWithValue(*username) + // get the admin user if it already exists + // this could happend if the installation was started, but not completed + adminUser, err := s.UserRepository.GetByUsername( + ctx, + username, + &repository.UserOption{ + WithRole: true, + WithCompany: true, + }, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return nil, nil, errs.Wrap(err) + } + password, err := vo.NewReasonableLengthPasswordGenerated() + if err != nil { + return nil, nil, errs.Wrap(err) + } + // if the admin account does not exist, create it + if adminUser == nil { + adminRole, err := s.RoleRepository.GetByName( + ctx, + data.RoleSuperAdministrator, + ) + if err != nil { + return nil, nil, errs.Wrap(err) + } + email := nullable.NewNullableWithValue(*vo.NewEmailMust(data.DefaultSacrificalAccountEmail)) + fullname := nullable.NewNullableWithValue(*vo.NewUserFullnameMust(data.DefaultSacrificalAccountName)) + tmpAdminID := nullable.NewNullableWithValue(uuid.New()) + tmpAdmin := &model.User{ + ID: tmpAdminID, + Name: fullname, + Username: nullableUsername, + Email: email, + RoleID: nullable.NewNullableWithValue(adminRole.ID), + } + hash, err := s.PasswordHasher.Hash(password.String()) + if err != nil { + return nil, nil, errs.Wrap(err) + } + + newUserID, err := s.UserRepository.Insert( + ctx, + tmpAdmin, + hash, + "", + ) + if err != nil { + return nil, nil, errs.Wrap(err) + } + adminUser, err = s.UserRepository.GetByID( + ctx, + newUserID, + &repository.UserOption{ + WithRole: true, + WithCompany: false, + }, + ) + if err != nil { + return nil, nil, errs.Wrap(err) + } + } else { + username, err := vo.NewUsername(data.DefaultSacrificalAccountUsername) + if err != nil { + return nil, nil, errs.Wrap(err) + } + // if the admin account exists, update the password + hash, err := s.PasswordHasher.Hash(password.String()) + if err != nil { + return nil, nil, errs.Wrap(err) + } + err = s.UserRepository.UpdatePasswordHashByUsername( + ctx, + username, + hash, + ) + if err != nil { + return nil, nil, errs.Wrap(err) + } + return adminUser, password, nil + } + return adminUser, password, nil +} diff --git a/backend/service/option.go b/backend/service/option.go new file mode 100644 index 0000000..d300928 --- /dev/null +++ b/backend/service/option.go @@ -0,0 +1,179 @@ +package service + +import ( + "context" + "strconv" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + 
"github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// Option is a service for Option +type Option struct { + Common + OptionRepository *repository.Option +} + +// GetOption gets an option +func (o *Option) GetOption( + ctx context.Context, + session *model.Session, + key string, +) (*model.Option, error) { + ae := NewAuditEvent("Option.GetOption", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + o.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + o.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + opt, err := o.OptionRepository.GetByKey( + ctx, + key, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, errs.Wrap(err) + } + if err != nil { + o.Logger.Errorw("failed to get option with key", + "key", key, + "error", err, + ) + return nil, errs.Wrap(err) + } + return opt, nil +} + +// GetOption gets an option +func (o *Option) GetOptionWithoutAuth( + ctx context.Context, + key string, +) (*model.Option, error) { + opt, err := o.OptionRepository.GetByKey( + ctx, + key, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, errs.Wrap(err) + } + if err != nil { + o.Logger.Errorw("failed to get option with key", + "key", key, + "error", err, + ) + return nil, errs.Wrap(err) + } + return opt, nil +} + +// MaskSSOSecret masks the sso secret +func (o *Option) MaskSSOSecret(opt *model.Option) (*model.Option, error) { + a, err := model.NewSSOOptionFromJSON([]byte(opt.Value.String())) + if err != nil { + o.Logger.Errorw("failed to read sso option", "error", err) + return nil, errs.Wrap(err) + } + // mask the key + a.ClientSecret = *vo.NewOptionalString1024Must("") + b, err := a.ToJSON() + if err != nil { + o.Logger.Errorw("failed to mask sso secret", "error", err) + return nil, errs.Wrap(err) + } + c, err := vo.NewOptionalString1MB(string(b)) + if err != nil { + o.Logger.Errorw("failed to mask secret option", "error", err) + return nil, errs.Wrap(err) + } + opt.Value = *c + return opt, nil +} + +// SetOptionByKey sets an option +func (o *Option) SetOptionByKey( + ctx context.Context, + session *model.Session, + option *model.Option, +) error { + ae := NewAuditEvent("Option.SetOptionByKey", session) + ae.Details["key"] = option.Key.String() + ae.Details["value"] = option.Value.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + o.LogAuthError(err) + return err + } + if !isAuthorized { + o.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + k := option.Key.String() + v := option.Value.String() + switch k { + case data.OptionKeyMaxFileUploadSizeMB: + if n, err := strconv.Atoi(v); err != nil || n <= 0 { + o.Logger.Debugw("invalid max file size", + "n", n, + "error", err, + ) + return validate.WrapErrorWithField( + errs.NewValidationError( + errors.New("invalid max"), + ), + "max file size", + ) + } + case data.OptionKeyRepeatOffenderMonths: + if n, err := strconv.Atoi(v); err != nil || n <= 0 { + o.Logger.Debugw("invalid repeat offender months", + "n", n, + "error", err, + ) + return validate.WrapErrorWithField( + errs.NewValidationError( + errors.New("invalid months"), + ), + "repeat offender months", + ) + } + case data.OptionKeyLogLevel: + // is allow listed + fallthrough + case data.OptionKeyDBLogLevel: + // is allow listed + fallthrough + case 
data.OptionKeyAdminSSOLogin: + // is allow listed + default: + o.Logger.Debugw("invalid settings key", "key", k) + return validate.WrapErrorWithField( + errs.NewValidationError( + errors.New("invalid option"), + ), + "key", + ) + } + // update options + err = o.OptionRepository.UpdateByKey( + ctx, + option, + ) + if err != nil { + o.Logger.Errorw("failed to update option by key", "error", err) + return err + } + o.AuditLogAuthorized(ae) + return nil +} diff --git a/backend/service/page.go b/backend/service/page.go new file mode 100644 index 0000000..2fe46d2 --- /dev/null +++ b/backend/service/page.go @@ -0,0 +1,319 @@ +package service + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "gorm.io/gorm" +) + +// Page is a Page service +type Page struct { + Common + PageRepository *repository.Page + CampaignRepository *repository.Campaign + CampaignTemplateService *CampaignTemplate +} + +// Create creates a new page +func (p *Page) Create( + ctx context.Context, + session *model.Session, + page *model.Page, +) (*uuid.UUID, error) { + ae := NewAuditEvent("Page.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + p.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + p.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + var companyID *uuid.UUID + if cid, err := page.CompanyID.Get(); err == nil { + companyID = &cid + } + + // validate data + if err := page.Validate(); err != nil { + p.Logger.Errorw("failed to validate page", "error", err) + return nil, errs.Wrap(err) + } + // check uniqueness + name := page.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + p.PageRepository.DB, + "pages", + name.String(), + companyID, + nil, + ) + if err != nil { + p.Logger.Errorw("failed to check page uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + p.Logger.Debugw("page name is already taken", "name", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // create page + id, err := p.PageRepository.Insert( + ctx, + page, + ) + if err != nil { + p.Logger.Errorw("failed to create page", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + p.AuditLogAuthorized(ae) + + return id, nil +} + +// GetAll gets pages +func (p *Page) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.PageOption, +) (*model.Result[model.Page], error) { + result := model.NewEmptyResult[model.Page]() + ae := NewAuditEvent("Page.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + p.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + p.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = p.PageRepository.GetAll( + ctx, + companyID, + options, + ) + if err != nil { + p.Logger.Errorw("failed to get pages", "error", err) + return result, errs.Wrap(err) + } + // no audit log on read + return result, nil +} + +// GetByID gets a page by ID +func (p 
*Page) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.PageOption, +) (*model.Page, error) { + ae := NewAuditEvent("Page.GetByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + p.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + p.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get page + page, err := p.PageRepository.GetByID( + ctx, + id, + options, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early this is not a an error + return nil, errs.Wrap(err) + } + if err != nil { + p.Logger.Errorw("failed to get page by ID", "error", err) + return nil, errs.Wrap(err) + } + // no audit log on read + + return page, nil +} + +// GetByCompanyID gets a page by company ID +func (p *Page) GetByCompanyID( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.PageOption, +) (*model.Result[model.Page], error) { + result := model.NewEmptyResult[model.Page]() + ae := NewAuditEvent("Page.GetByCompanyID", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + p.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + p.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get pages + result, err = p.PageRepository.GetAllByCompanyID( + ctx, + companyID, + &repository.PageOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early this is not a an error + return result, errs.Wrap(err) + } + if err != nil { + p.Logger.Errorw("failed to get page by company ID", "error", err) + return result, errs.Wrap(err) + } + // no audit log on read + + return result, nil +} + +// UpdateByID updates a page by ID +func (p *Page) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + page *model.Page, +) error { + ae := NewAuditEvent("Page.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + p.LogAuthError(err) + return err + } + if !isAuthorized { + p.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get current + current, err := p.PageRepository.GetByID( + ctx, + id, + &repository.PageOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + p.Logger.Debugw("failed to update page by ID", "error", err) + return err + } + if err != nil { + p.Logger.Errorw("failed to update page by ID", "error", err) + return err + } + // update page - if a field is present and not null, update it + if v, err := page.Name.Get(); err == nil { + // check uniqueness + var companyID *uuid.UUID + if cid, err := current.CompanyID.Get(); err == nil { + companyID = &cid + } + name := page.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + p.PageRepository.DB, + "pages", + name.String(), + companyID, + id, + ) + if err != nil { + p.Logger.Errorw("failed to check page uniqueness", "error", err) + return err + } + if !isOK { + p.Logger.Debugw("page name is already taken", "name", name.String()) + return validate.WrapErrorWithField(errors.New("is not 
unique"), "name") + } + current.Name.Set(v) + } + if v, err := page.Content.Get(); err == nil { + current.Content.Set(v) + } + // update page + err = p.PageRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + p.Logger.Errorw("failed to update page by ID", "error", err) + return err + } + ae.Details["id"] = id.String() + p.AuditLogNotAuthorized(ae) + + return nil +} + +// DeleteByID deletes a page by ID +func (p *Page) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Page.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + p.LogAuthError(err) + return err + } + if !isAuthorized { + p.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // remove the relation to campaign allow deny PageRepository + err = p.CampaignRepository.RemoveDenyPageByDenyPageIDs( + ctx, + []*uuid.UUID{id}, + ) + // delete the relation from campaign templates + err = p.CampaignTemplateService.RemovePagesByPageID( + ctx, + session, + id, + ) + if err != nil { + p.Logger.Errorw("failed to remove page ID relations from campaign templates", "error", err) + return err + } + // delete page + err = p.PageRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + p.Logger.Errorw("failed to delete page by ID", "error", err) + return err + } + p.AuditLogAuthorized(ae) + + return nil +} diff --git a/backend/service/recipient.go b/backend/service/recipient.go new file mode 100644 index 0000000..a560c01 --- /dev/null +++ b/backend/service/recipient.go @@ -0,0 +1,595 @@ +package service + +import ( + "context" + "fmt" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// Recipient is the Recipient service +type Recipient struct { + Common + RecipientRepository *repository.Recipient + RecipientGroupRepository *repository.RecipientGroup + CampaignRepository *repository.Campaign + CampaignRecipientRepository *repository.CampaignRecipient +} + +// Create creates a new recipient +func (r *Recipient) Create( + ctx context.Context, + session *model.Session, + recipient *model.Recipient, +) (*uuid.UUID, error) { + ae := NewAuditEvent("Recipient.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // validate recipient + if err := recipient.Validate(); err != nil { + r.Logger.Debugw("failed to create recipient - recipient is invalid", "error", err) + return nil, errs.Wrap(err) + } + email := recipient.Email.MustGet() + // check if recipient already exists to avoid a unique constraint error + // and gorm does not return a unique constraint error but a string error depending on DB + _, err = r.RecipientRepository.GetByEmail( + ctx, + &email, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + r.Logger.Errorw("failed to create recipient - failed to get recipient by any 
identifier", "error", err) + return nil, errs.Wrap(err) + } + if err == nil { + r.Logger.Debugw("email is already taken", "email", email.String()) + return nil, validate.WrapErrorWithField(errors.New("not unique"), "email") + } + id, err := r.RecipientRepository.Insert( + ctx, + recipient, + ) + if err != nil { + r.Logger.Errorw("failed to create recipient - failed to insert recipient", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + r.AuditLogAuthorized(ae) + + return id, nil +} + +// UpdateByID updates a recipient by ID +func (r *Recipient) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.Recipient, +) error { + ae := NewAuditEvent("Recipient.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return err + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // check if recipient already exists and the user is allowed to update the recipient + current, err := r.RecipientRepository.GetByID( + ctx, + id, + &repository.RecipientOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + r.Logger.Errorw("failed to update recipient - failed to get recipient by any identifier", "error", err) + return err + } + if errors.Is(err, gorm.ErrRecordNotFound) { + r.Logger.Debug("failed to update recipient - recipient not found") + return err + } + // update config - if a field is present and not null, update it + + // if the email is changed, check that another recipient is not using this email already + if v, err := incoming.Email.Get(); err != nil { + if v.String() != current.Email.MustGet().String() { + var companyID *uuid.UUID + if current.CompanyID != nil { + if cid, err := current.CompanyID.Get(); err != nil { + companyID = &cid + } + } + _, err := r.RecipientRepository.GetByEmailAndCompanyID( + ctx, + &v, + companyID, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + r.Logger.Errorw("failed check existing recipient email", "error", err) + return err + } + if !errors.Is(err, gorm.ErrRecordNotFound) { + r.Logger.Debugw("email is already taken", "email", v.String()) + s := fmt.Sprintf("email '%s' is already used by another recipient", v.String()) + return validate.WrapErrorWithField(errors.New("not unique"), s) + } + } + current.Email.Set(v) + } + if v, err := incoming.Phone.Get(); err == nil { + current.Phone.Set(v) + } + if v, err := incoming.ExtraIdentifier.Get(); err == nil { + current.ExtraIdentifier.Set(v) + } + if v, err := incoming.FirstName.Get(); err == nil { + current.FirstName.Set(v) + } + if v, err := incoming.LastName.Get(); err == nil { + current.LastName.Set(v) + } + if v, err := incoming.Position.Get(); err == nil { + current.Position.Set(v) + } + if v, err := incoming.Department.Get(); err == nil { + current.Department.Set(v) + } + if v, err := incoming.City.Get(); err == nil { + current.City.Set(v) + } + if v, err := incoming.Country.Get(); err == nil { + current.Country.Set(v) + } + if v, err := incoming.Misc.Get(); err == nil { + current.Misc.Set(v) + } + // validate + if err := current.Validate(); err != nil { + r.Logger.Debugw("failed to update recipient - recipient is invalid", "error", err) + return err + } + // save config + err = r.RecipientRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + 
r.Logger.Errorw("failed to update recipient - failed to update recipient", "error", err) + return err + } + r.AuditLogAuthorized(ae) + + return nil +} + +// GetByID gets a recipient by ID +func (r *Recipient) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.RecipientOption, +) (*model.Recipient, error) { + ae := NewAuditEvent("Recipient.GetByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get recipient + recipient, err := r.RecipientRepository.GetByID( + ctx, + id, + options, + ) + if err != nil { + r.Logger.Errorw("failed to get recipient by id - failed to get recipient", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + + return recipient, nil +} + +// GetByCompanyID gets a recipients by company ID +func (r *Recipient) GetByCompanyID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.RecipientOption, +) (*model.Result[model.Recipient], error) { + result := model.NewEmptyResult[model.Recipient]() + ae := NewAuditEvent("Recipient.GetByCompanyID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get recipients + result, err = r.RecipientRepository.GetAllByCompanyID( + ctx, + id, + options, + ) + if err != nil { + r.Logger.Errorw("failed to get recipients by company id - failed to get recipient", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetStatsByID get campaign events stats by recipient ID +func (r *Recipient) GetStatsByID( + ctx context.Context, + session *model.Session, + recipientID *uuid.UUID, +) (*model.RecipientCampaignStatsView, error) { + ae := NewAuditEvent("Recipient.GetStatsByID", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get stats + stats, err := r.RecipientRepository.GetStatsByID( + ctx, + recipientID, + ) + if err != nil { + r.Logger.Errorw("failed to get all recipient events", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + + return stats, nil +} + +// GetAllCampaignEvents get events by recipient ID +// gets all events if campaignID is nil +func (r *Recipient) GetAllCampaignEvents( + ctx context.Context, + session *model.Session, + recipientID *uuid.UUID, + campaignID *uuid.UUID, + queryArgs *vo.QueryArgs, +) (*model.Result[model.RecipientCampaignEvent], error) { + result := model.NewEmptyResult[model.RecipientCampaignEvent]() + ae := NewAuditEvent("Recipient.GetAllCampaignEvents", session) + if recipientID != nil { + ae.Details["recipientId"] = recipientID.String() + } + if campaignID != nil { + ae.Details["campaignId"] = campaignID.String() + } + // check permissions + 
isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get all events + result, err = r.RecipientRepository.GetAllCampaignEvents( + ctx, + recipientID, + campaignID, + queryArgs, + ) + if err != nil { + r.Logger.Errorw("failed to get all recipient events", "error", err) + return nil, errs.Wrap(err) + } + // no audit log on read + return result, nil +} + +// GetAll gets all recipients +func (r *Recipient) GetAll( + ctx context.Context, + companyID *uuid.UUID, // can be null + session *model.Session, + options *repository.RecipientOption, +) (*model.Result[model.RecipientView], error) { + result := model.NewEmptyResult[model.RecipientView]() + ae := NewAuditEvent("Recipient.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get all recipients + result, err = r.RecipientRepository.GetAll( + ctx, + companyID, + options, + ) + if err != nil { + r.Logger.Errorw("failed to get all recipients - failed to get all recipients", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +func (r *Recipient) GetRepeatOffenderCount( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, +) (int64, error) { + ae := NewAuditEvent("Recipient.GetRepeatOffenderCount", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return 0, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return 0, errs.ErrAuthorizationFailed + } + + count, err := r.RecipientRepository.GetRepeatOffenderCount(ctx, companyID) + if err != nil { + r.Logger.Errorw("failed to get repeat offender count", "error", err) + return 0, errs.Wrap(err) + } + + return count, nil +} + +// GetByEmail gets a recipient by email +func (r *Recipient) GetByEmail( + ctx context.Context, + session *model.Session, + email *vo.Email, + companyID *uuid.UUID, +) (*model.Recipient, error) { + ae := NewAuditEvent("Recipient.GetByEmail", session) + ae.Details["email"] = email.String() + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get recipient + recipient, err := r.RecipientRepository.GetByEmailAndCompanyID( + ctx, + email, + companyID, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, errs.Wrap(err) + } + if err != nil { + r.Logger.Errorw("failed to get recipient by any identifier - failed to get recipient", + "error", err, + ) + return nil, errs.Wrap(err) + } + // no audit on read + + return recipient, nil +} + +// Import imports recipients +// if the recipient does not exists, it will be created and added to the group +// if the recipient exits, it will be 
updated and added to the group +func (r *Recipient) Import( + ctx context.Context, + session *model.Session, + recipients []*model.Recipient, + ignoreOverwriteEmptyFields bool, + companyID *uuid.UUID, +) ([]*uuid.UUID, error) { + ae := NewAuditEvent("Recipient.Import", session) + recipientsIDs := []*uuid.UUID{} + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return recipientsIDs, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return recipientsIDs, errs.ErrAuthorizationFailed + } + if len(recipients) == 0 { + return recipientsIDs, validate.WrapErrorWithField(errors.New("no recipients"), "add recipients") + } + // first validate all the entries + for _, recipient := range recipients { + if err := recipient.Validate(); err != nil { + return recipientsIDs, errs.Wrap(err) + } + } + // if the recipient does not exist, create it + // if the recipient exists, update it + for _, incoming := range recipients { + // check if the recipient exists + email := incoming.Email.MustGet() + current, err := r.RecipientRepository.GetByEmail( + ctx, + &email, + "id", "email", "company_id", + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + r.Logger.Debugw("failed to import recipients - failed to get recipient", "error", err) + return recipientsIDs, errs.Wrap(err) + } + if current == nil { + // create recipient + if companyID != nil { + incoming.CompanyID.Set(*companyID) + } + recipientID, err := r.Create( + ctx, + session, + incoming, + ) + if err != nil { + r.Logger.Debugw("failed to import recipients - failed to create recipient", + "error", err, + ) + return recipientsIDs, errs.Wrap(err) + } + recipientsIDs = append(recipientsIDs, recipientID) + } else { + // set the companyID to NOT SET, so it is not overwritten if supplied + incoming.CompanyID.SetUnspecified() + if ignoreOverwriteEmptyFields { + incoming.NullifyEmptyOptionals() + } else { + incoming.EmptyStringNulledOptionals() + } + // update recipient + recipientID := current.ID.MustGet() + err = r.UpdateByID( + ctx, + session, + &recipientID, + incoming, + ) + if err != nil { + r.Logger.Debugw("failed to import recipients - failed to update recipient", + "error", err, + ) + return recipientsIDs, errs.Wrap(err) + } + recipientsIDs = append(recipientsIDs, &recipientID) + } + } + r.AuditLogAuthorized(ae) + + return recipientsIDs, nil +} + +// Delete deletes a recipient +func (r *Recipient) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Recipient.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return err + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // remove recipient from all groups + err = r.RecipientGroupRepository.RemoveRecipientByIDFromAllGroups(ctx, id) + if err != nil { + r.Logger.Errorw("failed to delete recipient - failed to remove recipient from all groups", + "error", err, + ) + return err + } + // if the recipient is in any active campaign, cancel the recipient sending + err = r.CampaignRecipientRepository.CancelInActiveCampaigns(ctx, id) + if err != nil { + r.Logger.Errorw("failed to cancel campaign recipient in active campaigns", "error", err) + return err + 
} + // anonymize all recipient data + anonymizedID := uuid.New() + err = r.CampaignRecipientRepository.Anonymize( + ctx, + id, + &anonymizedID, + ) + if err != nil { + r.Logger.Errorw("failed to add anonymized ID to campaign recipient", "error", err) + return err + } + // anonymize events and assign each anonymized ID so the events can still be tracked + err = r.CampaignRepository.AnonymizeCampaignEventsByRecipientID( + ctx, + id, + &anonymizedID, + ) + if err != nil { + r.Logger.Errorw("failed to anonymize campaign event", "error", err) + return err + } + // remove recipient id from all campaign recipients + err = r.CampaignRecipientRepository.RemoveRecipientIDByRecipientID( + ctx, + id, + ) + if err != nil { + r.Logger.Errorw("failed to remove recipient id from campaign recipient", "error", err) + return err + } + // delete recipient + err = r.RecipientRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + r.Logger.Errorw("failed to delete recipient - failed to delete recipient", "error", err) + return err + } + r.AuditLogAuthorized(ae) + + return nil +} diff --git a/backend/service/recipientGroup.go b/backend/service/recipientGroup.go new file mode 100644 index 0000000..b8879ff --- /dev/null +++ b/backend/service/recipientGroup.go @@ -0,0 +1,597 @@ +package service + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "gorm.io/gorm" +) + +// RecipientGroup is a recipient group service +type RecipientGroup struct { + Common + CampaignRepository *repository.Campaign + CampaignRecipientRepository *repository.CampaignRecipient + RecipientGroupRepository *repository.RecipientGroup + RecipientRepository *repository.Recipient + RecipientService *Recipient + DB *gorm.DB +} + +// Create inserts a new recipient group +func (r *RecipientGroup) Create( + ctx context.Context, + session *model.Session, + group *model.RecipientGroup, +) (*uuid.UUID, error) { + ae := NewAuditEvent("RecipientGroup.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // check uniqueness + var companyID *uuid.UUID + if cid, err := group.CompanyID.Get(); err == nil { + companyID = &cid + } + name := group.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + r.RecipientRepository.DB, + "recipient_groups", + name.String(), + companyID, + nil, + ) + if err != nil { + r.Logger.Errorw("failed to check recipient group uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + r.Logger.Debugw("recipient group is already taken", "name", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // insert recipient group + recipientGroupID, err := r.RecipientGroupRepository.Insert( + ctx, + group, + ) + if err != nil { + r.Logger.Debugw("failed to create recipient group - failed to insert recipient group", + "error", err, + ) + + return nil, errs.Wrap(err) + } + ae.Details["id"] = recipientGroupID.String() + r.AuditLogAuthorized(ae) + + return recipientGroupID, nil +} + +// Import imports recipients into a 
recipient group +// if the recipient does not exists, it will be created and added to the group +// if the recipient exits, it will be updated and added to the group +func (r *RecipientGroup) Import( + ctx context.Context, + session *model.Session, + recipients []*model.Recipient, + ignoreOverwriteEmptyFields bool, + recipientGroupID *uuid.UUID, + companyID *uuid.UUID, +) error { + ae := NewAuditEvent("RecipientGroup.Import", session) + ae.Details["recipientGroupId"] = recipientGroupID.String() + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return err + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + if len(recipients) == 0 { + return validate.WrapErrorWithField(errors.New("no recipients"), "add recipients") + } + // check that the recipient group exists + _, err = r.RecipientGroupRepository.GetByID( + ctx, + recipientGroupID, + &repository.RecipientGroupOption{}, + ) + if err != nil { + r.Logger.Debugw("failed to import recipients - failed to get recipient group", "error", err) + return err + } + recipientIDs, err := r.RecipientService.Import( + ctx, + session, + recipients, + ignoreOverwriteEmptyFields, + companyID, + ) + if err != nil { + return err + } + // add recpients to group + err = r.AddRecipients( + ctx, + session, + recipientGroupID, + recipientIDs, + ) + if err != nil { + r.Logger.Debugw("failed to import recipients - failed to add recipients to group", + "error", err, + ) + return err + } + r.AuditLogAuthorized(ae) + + return nil +} + +// GetByID returns a recipient group by ID +func (r *RecipientGroup) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.RecipientGroupOption, +) (*model.RecipientGroup, error) { + ae := NewAuditEvent("RecipientGroup.GetByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get recipient group + recipientGroup, err := r.RecipientGroupRepository.GetByID( + ctx, + id, + options, + ) + if err != nil { + r.Logger.Errorw("failed to get recipient group by id - failed to get recipient group", + "error", err, + ) + return nil, errs.Wrap(err) + } + // no audit on read + + return recipientGroup, nil +} + +// GetByCompanyID returns recipient groups by company ID +func (r *RecipientGroup) GetByCompanyID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.RecipientGroupOption, +) ([]*model.RecipientGroup, error) { + ae := NewAuditEvent("RecipientGroup.GetByCompanyID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get recipient group + recipientGroups, err := r.RecipientGroupRepository.GetAllByCompanyID( + ctx, + id, + options, + ) + if err != nil { + r.Logger.Errorw("failed to get recipient groups 
by id - failed to get recipient group", + "error", err, + ) + return nil, errs.Wrap(err) + } + // no audit on read + + return recipientGroups, nil +} + +// GetAll returns all recipient groups using pagination +func (r *RecipientGroup) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, // can be null + options *repository.RecipientGroupOption, +) (*model.Result[model.RecipientGroup], error) { + result := model.NewEmptyResult[model.RecipientGroup]() + ae := NewAuditEvent("RecipientGroup.GetAll", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get recipient groups + result, err = r.RecipientGroupRepository.GetAll( + ctx, + companyID, + options, + ) + if err != nil { + r.Logger.Errorw("failed to get all recipient groups - failed to get all recipient groups", + "error", err, + ) + return result, errs.Wrap(err) + } + // no audit log on read + return result, nil +} + +// GetRecipientsByID returns all recipients of a recipient group +func (r *RecipientGroup) GetRecipientsByGroupID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.RecipientOption, +) (*model.Result[model.Recipient], error) { + result := model.NewEmptyResult[model.Recipient]() + ae := NewAuditEvent("RecipientGroup.GetRecipientsByGroupID", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get recipients + result, err = r.RecipientGroupRepository.GetRecipientsByGroupID( + ctx, + id, + options, + ) + if err != nil { + r.Logger.Errorw("failed to get recipients by id - failed to get recipients", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// UpdateByID updates a recipient group by ID +func (r *RecipientGroup) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.RecipientGroup, +) error { + ae := NewAuditEvent("RecipientGroup.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return err + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get current + current, err := r.RecipientGroupRepository.GetByID( + ctx, + id, + &repository.RecipientGroupOption{}, + ) + if err != nil { + r.Logger.Errorw("failed to get recipient group", "error", err) + return err + } + if incoming.Name.IsSpecified() && !incoming.Name.IsNull() { + var companyID *uuid.UUID + if cid, err := current.CompanyID.Get(); err == nil { + companyID = &cid + } + name := incoming.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + r.RecipientRepository.DB, + "recipient_groups", + name.String(), + companyID, + id, + ) + if err != nil { + r.Logger.Errorw("failed to check recipient group uniqueness", "error", err) + return err + } 
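+ // the uniqueness check is scoped to the group's company (global when no company is set)
+ // and receives the group's own ID, so keeping the current name should not be flagged as a duplicate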
+ if !isOK { + r.Logger.Debugw("recipient group is already taken", "name", name.String()) + return validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + current.Name.Set(name) + } + // update recipient group + err = r.RecipientGroupRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + r.Logger.Errorw("failed to update recipient group by id - failed to update recipient group", + "error", err, + ) + return err + } + r.AuditLogAuthorized(ae) + + return nil +} + +// AddRecipients adds recipients to a recipient group +func (r *RecipientGroup) AddRecipients( + ctx context.Context, + session *model.Session, + groupID *uuid.UUID, + recipients []*uuid.UUID, +) error { + ae := NewAuditEvent("RecipientGroup.AddRecipients", session) + ae.Details["id"] = groupID.String() + ae.Details["recipientIds"] = repository.UUIDsToStrings(recipients) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return err + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // ensure that the recipient group exists + group, err := r.RecipientGroupRepository.GetByID( + ctx, + groupID, + &repository.RecipientGroupOption{}, + ) + if err != nil { + r.Logger.Errorw("failed to add recipients - failed to get recipient group", "error", err) + return err + } + // check if the recipients can be added to group + for _, recipientID := range recipients { + recipient, err := r.RecipientRepository.GetByID( + ctx, + recipientID, + &repository.RecipientOption{}, + ) + if err != nil { + r.Logger.Errorw("failed to add recipients - failed to get recipient by id", + "error", err, + ) + return err + } + // if the group has a company ID then the recipients company ID must match + // unless the recipient has no company id as it is global + if v, err := group.CompanyID.Get(); err == nil { + // if the recipient company is set and does not match the groups + if recipient.CompanyID.IsSpecified() && !recipient.CompanyID.IsNull() && v.String() != recipient.CompanyID.MustGet().String() { + + r.Logger.Errorw("failed to add recipients - recipient company id does not match group id", + "error", err, + ) + return validate.WrapErrorWithField(errors.New("company id does not match group id"), "recipient") + } + } else { + // if the group does not have a company ID then the recipient must not have a company ID + if recipient.CompanyID.IsSpecified() && !recipient.CompanyID.IsNull() { + r.Logger.Errorw("failed to add recipients - recipient company id is not nil", "error", err) + return validate.WrapErrorWithField(errors.New("cant add recipient belonging to a company to a global group"), "recipient") + } + } + } + // add recipients to group + err = r.RecipientGroupRepository.AddRecipients( + ctx, + groupID, + recipients, + ) + if err != nil { + r.Logger.Errorw("failed to add recipients - failed to add recipients to group", "error", err) + return err + } + r.AuditLogAuthorized(ae) + + return nil +} + +// RemoveRecipients removes a recipient from a recipient group +func (r *RecipientGroup) RemoveRecipients( + ctx context.Context, + session *model.Session, + groupID *uuid.UUID, + recipientIDs []*uuid.UUID, +) error { + ae := NewAuditEvent("RecipientGroup.RemoveRecipients", session) + ae.Details["groupId"] = groupID.String() + ae.Details["recipientIds"] = repository.UUIDsToStrings(recipientIDs) + // check permissions + isAuthorized, err := 
IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return err + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // anonymize recipients in any recipient-campaign data + for _, recpID := range recipientIDs { + // if the recipient is in any active campaign, cancel the recipient sending + err = r.CampaignRecipientRepository.CancelInActiveCampaigns(ctx, recpID) + if err != nil { + r.Logger.Errorw("failed to cancel campaign recipient", "error", err) + return err + } + anonymizedID := uuid.New() + err = r.RecipientService.CampaignRecipientRepository.Anonymize( + ctx, + recpID, + &anonymizedID, + ) + if err != nil { + r.Logger.Errorw( + "failed to remove recipient - failed to anonymized recipiet campaign data", + "error", err, + ) + return err + } + err = r.CampaignRepository.AnonymizeCampaignEventsByRecipientID( + ctx, + recpID, + &anonymizedID, + ) + if err != nil { + r.Logger.Errorw("failed to anonymize campaign event", "error", err) + return err + } + } + // remove recipient from group + err = r.RecipientGroupRepository.RemoveRecipients( + ctx, + groupID, + recipientIDs, + ) + if err != nil { + r.Logger.Errorw("failed to remove recipient - failed to remove recipient from group", + "error", err, + ) + return err + } + r.AuditLogAuthorized(ae) + + return nil +} + +// DeleteByID deletes a recipient group by ID +func (r *RecipientGroup) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("RecipientGroup.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + r.LogAuthError(err) + return err + } + if !isAuthorized { + r.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get all recipients in group + group, err := r.RecipientGroupRepository.GetByID( + ctx, + id, + &repository.RecipientGroupOption{ + WithRecipients: true, + }, + ) + if len(group.Recipients) > 0 { + // anonymize recipients in any recipient-campaign data + for _, recipient := range group.Recipients { + anonymizedID := uuid.New() + recpID := recipient.ID.MustGet() + + // if the recipient is in any active campaign, cancel the recipient sending + err = r.CampaignRecipientRepository.CancelInActiveCampaigns(ctx, &recpID) + if err != nil { + r.Logger.Errorw("failed to cancel campaign recipient", "error", err) + return err + } + err = r.RecipientService.CampaignRecipientRepository.Anonymize( + ctx, + &recpID, + &anonymizedID, + ) + if err != nil { + r.Logger.Errorw( + "failed to remove recipient - failed to anonymized recipiet campaign data", + "error", err, + ) + return err + } + err = r.CampaignRepository.AnonymizeCampaignEventsByRecipientID( + ctx, + &recpID, + &anonymizedID, + ) + if err != nil { + r.Logger.Errorw("failed to anonymize campaign event", "error", err) + return err + } + } + } + // remove group from campaign groups + err = r.CampaignRepository.RemoveCampaignRecipientGroupByGroupID( + ctx, + id, + ) + if err != nil { + r.Logger.Errorw( + "failed to delete group - failed remove group from campaign data", + "error", err, + ) + return err + } + // remove group and recipients from group + err = r.RecipientGroupRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + r.Logger.Errorw("failed to delete recipient group by id - failed to delete recipient 
group", + "error", err, + ) + return err + } + r.AuditLogAuthorized(ae) + + return nil +} diff --git a/backend/service/session.go b/backend/service/session.go new file mode 100644 index 0000000..ffc4264 --- /dev/null +++ b/backend/service/session.go @@ -0,0 +1,267 @@ +package service + +import ( + "context" + "fmt" + "time" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "gorm.io/gorm" +) + +// Session is a service for Session +type Session struct { + Common + SessionRepository *repository.Session +} + +// GetSession returns a session if one exists associated with the request +// if the session exists it will extend the session expiry date +// else it will invalidate the session cookie if provided +// modifies the response headers +func (s *Session) GetAndExtendSession(g *gin.Context) (*model.Session, error) { + session, err := s.validateAndExtendSession(g) + hasErr := errors.Is(err, gorm.ErrRecordNotFound) || errors.Is(err, errs.ErrSessionCookieNotFound) + if hasErr { + return nil, errs.Wrap(err) + } + if err != nil { + // TODO audit log? if the error is because the session IP changed + s.Logger.Debugw("failed to validate and extend session", "error", err) + return nil, errs.Wrap(err) + } + return session, nil +} + +// GetByID returns a session by ID +func (s *Session) GetByID( + ctx context.Context, + sessionID *uuid.UUID, + options *repository.SessionOption, +) (*model.Session, error) { + session, err := s.SessionRepository.GetByID( + ctx, + sessionID, + options, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, gorm.ErrRecordNotFound + } + + return session, nil +} + +// GetSessionsByUserID returns all sessions by user ID +func (s *Session) GetSessionsByUserID( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, + options *repository.SessionOption, +) (*model.Result[model.Session], error) { + result := model.NewEmptyResult[model.Session]() + ae := NewAuditEvent("Session.GetSessionsByUserID", session) + ae.Details["userID"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get all sessions by user ID + result, err = s.SessionRepository.GetAllActiveSessionByUserID( + ctx, + userID, + options, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + return result, gorm.ErrRecordNotFound + } + if err != nil { + s.Logger.Errorw("failed to get sessions by user ID", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + + return result, nil +} + +// validateAndExtendSession returns a session if one exists associated with the request +func (s *Session) validateAndExtendSession(g *gin.Context) (*model.Session, error) { + cookie, err := g.Cookie(data.SessionCookieKey) + if err != nil { + return nil, errs.ErrSessionCookieNotFound + } + id, err := uuid.Parse(cookie) + if err != nil { + return nil, errs.Wrap(err) + } + // checks that the session is not expired + ctx := g.Request.Context() + session, err := s.SessionRepository.GetByID(ctx, &id, &repository.SessionOption{ + WithUser: true, + WithUserRole: true, + WithUserCompany: 
true, + }) + // there is a valid session cookie but no valid session, so we expire the session cookie + if errors.Is(err, gorm.ErrRecordNotFound) { + g.SetCookie( + data.SessionCookieKey, + "", + -1, + "/", + "", + false, + true, + ) + return nil, errs.Wrap(err) + } + if err != nil { + return nil, errs.Wrap(err) + } + // handle session and that IP has not changed + // if it has changed - we expire the session + sessionIP := session.IP + clientIP := g.ClientIP() + if session.IP != clientIP { + err := s.Expire(ctx, session.ID) + if err != nil { + return nil, fmt.Errorf( + "failed to expire session upon changed IP (%s != %s): %s", + sessionIP, + clientIP, + err, + ) + } + // audit log - session invliad due to IP change + ae := NewAuditEvent("Session.Renew", session) + ae.Details["reason"] = "IP changed" + ae.Details["previousIP"] = sessionIP + ae.Details["newIP"] = clientIP + s.AuditLogNotAuthorized(ae) + return nil, fmt.Errorf( + "session IP changed (%s != %s)", + sessionIP, + clientIP, + ) + } + // session is valid - update the session expiry date + session.Renew(model.SessionIdleTimeout) + err = s.SessionRepository.UpdateExpiry(ctx, session) + if err != nil { + return nil, fmt.Errorf("failed to update session expiry: %s", err) + } + + return session, nil +} + +// Create creates a new session +// no auth - anyone can create a session +func (s *Session) Create( + ctx context.Context, + user *model.User, + ip string, +) (*model.Session, error) { + now := time.Now() + expiredAt := now.Add(model.SessionIdleTimeout).UTC() + maxAgeAt := now.Add(model.SessionMaxAgeAt).UTC() + id := uuid.New() + newSession := &model.Session{ + ID: &id, + User: user, + IP: ip, + ExpiresAt: &expiredAt, + MaxAgeAt: &maxAgeAt, + } + sessionID, err := s.SessionRepository.Insert( + ctx, + newSession, + ) + if err != nil { + s.Logger.Errorw("failed to insert session when creating a new session", "error", err) + return nil, errs.Wrap(err) + } + createdSession, err := s.SessionRepository.GetByID( + ctx, + sessionID, + &repository.SessionOption{ + WithUser: true, + WithUserRole: true, + WithUserCompany: true, + }, + ) + if err != nil { + s.Logger.Errorw("failed to get session after creating it", "error", err) + return nil, errs.Wrap(err) + } + return createdSession, nil +} + +// Expire expires a session +func (s *Session) Expire( + ctx context.Context, + sessionID *uuid.UUID, +) error { + err := s.SessionRepository.Expire(ctx, sessionID) + if err != nil { + s.Logger.Errorw("failed to expire session", "error", err) + return err + } + return nil +} + +// ExpireAllByUserID expires all sessions by user ID +func (s *Session) ExpireAllByUserID( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, +) error { + ae := NewAuditEvent("Session.ExpireAllByUserID", session) + ae.Details["userID"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return err + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + if session.User == nil { + s.Logger.Errorw("failed to get user from session when expiring session", "error", err) + return err + } + sessions, err := s.SessionRepository.GetAllActiveSessionByUserID( + ctx, + userID, + &repository.SessionOption{}, + ) + if err != nil { + s.Logger.Errorw("failed to get user sessions when expiring session", "error", err) + return err + } + if len(sessions.Rows) == 0 { + s.Logger.Debugw("no 
sessions to remove", "userID", userID.String()) + } + for _, session := range sessions.Rows { + err = s.SessionRepository.Expire(ctx, session.ID) + if err != nil { + s.Logger.Errorw("failed a users expiring session", "error", err) + return err + } + } + s.AuditLogAuthorized(ae) + + return nil +} diff --git a/backend/service/smtpConfiguration.go b/backend/service/smtpConfiguration.go new file mode 100644 index 0000000..4921acb --- /dev/null +++ b/backend/service/smtpConfiguration.go @@ -0,0 +1,623 @@ +package service + +import ( + "context" + "crypto/tls" + "strings" + + "github.com/go-errors/errors" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "github.com/wneessen/go-mail" + "gorm.io/gorm" +) + +// SMTPConfiguration is a SMTP configuration service +type SMTPConfiguration struct { + Common + SMTPConfigurationRepository *repository.SMTPConfiguration + CampaignTemplateService *CampaignTemplate +} + +// Create creates a new SMTP configuration +func (s *SMTPConfiguration) Create( + ctx context.Context, + session *model.Session, + conf *model.SMTPConfiguration, +) (*uuid.UUID, error) { + ae := NewAuditEvent("SMTPConfiguration.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // validate data + if err := conf.Validate(); err != nil { + s.Logger.Errorw("failed to validate SMTP configuration", "error", err) + return nil, errs.Wrap(err) + } + // check uniqueness + var companyID *uuid.UUID + if cid, err := conf.CompanyID.Get(); err == nil { + companyID = &cid + } + + name := conf.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + s.SMTPConfigurationRepository.DB, + "smtp_configurations", + name.String(), + companyID, + nil, + ) + if err != nil { + s.Logger.Errorw("failed to check SMTP uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + s.Logger.Debugw("smtp configuration name is already taken", "name", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // create config + id, err := s.SMTPConfigurationRepository.Insert( + ctx, + conf, + ) + if err != nil { + s.Logger.Errorw("failed to create SMTP configuration", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + s.AuditLogAuthorized(ae) + + return id, nil +} + +// GetAll gets SMTP configurations +func (s *SMTPConfiguration) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.SMTPConfigurationOption, +) (*model.Result[model.SMTPConfiguration], error) { + result := model.NewEmptyResult[model.SMTPConfiguration]() + ae := NewAuditEvent("SMTPConfiguration.GetAll", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return result, errs.Wrap(err) + } + 
if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get + result, err = s.SMTPConfigurationRepository.GetAll( + ctx, + companyID, + options, + ) + if err != nil { + s.Logger.Errorw("failed to get SMTP configurations", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetByID gets a SMTP configuration by ID +func (s *SMTPConfiguration) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.SMTPConfigurationOption, +) (*model.SMTPConfiguration, error) { + ae := NewAuditEvent("SMTPConfiguration.GetByID", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get config + config, err := s.SMTPConfigurationRepository.GetByID( + ctx, + id, + options, + ) + if err != nil { + s.Logger.Errorw("failed to get SMTP configuration", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + + return config, nil +} + +// SendTestEmail tests a SMTP configuration by ID +func (s *SMTPConfiguration) SendTestEmail( + g *gin.Context, + session *model.Session, + id *uuid.UUID, + to *vo.Email, + from *vo.Email, +) error { + ae := NewAuditEvent("SMTPConfiguration.SendTestEmail", session) + ae.Details["id"] = id.String() + ae.Details["to-email"] = to.String() + + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return err + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + smtpConfig, err := s.GetByID( + g, + session, + id, + &repository.SMTPConfigurationOption{ + WithHeaders: true, + }, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + s.Logger.Errorw("smtp configuration did not load", "error", err) + return err + } + smtpPort, err := smtpConfig.Port.Get() + if err != nil { + s.Logger.Errorw("failed to get smtp port", "error", err) + return err + } + smtpHost, err := smtpConfig.Host.Get() + if err != nil { + s.Logger.Errorw("failed to get smtp host", "error", err) + return err + } + smtpIgnoreCertErrors, err := smtpConfig.IgnoreCertErrors.Get() + if err != nil { + s.Logger.Errorw("failed to get smtp ignore cert errors", "error", err) + return err + } + m := mail.NewMsg(mail.WithNoDefaultUserAgent()) + err = m.EnvelopeFrom(from.String()) + if err != nil { + s.Logger.Errorw("failed to set envelope from", "error", err) + return err + } + // headers + err = m.From(from.String()) + if err != nil { + s.Logger.Errorw("failed to set mail header 'From'", "error", err) + return err + } + err = m.To(to.String()) + if err != nil { + s.Logger.Errorw("failed to set mail header 'To'", "error", err) + return err + } + if headers := smtpConfig.Headers; headers != nil { + for _, header := range headers { + key := header.Key.MustGet() + value := header.Value.MustGet() + m.SetGenHeader( + mail.Header(key.String()), + value.String(), + ) + } + } + m.Subject("Configuration Test") + m.SetBodyString("text/html", + `This is a test email to verify the SMTP configuration.`, + ) + // setup client + emailOptions := []mail.Option{ + mail.WithPort(smtpPort.Int()), + mail.WithTLSConfig( + &tls.Config{ + ServerName: 
smtpHost.String(), + // #nosec + InsecureSkipVerify: smtpIgnoreCertErrors, + // MinVersion: tls.VersionTLS12, + }, + ), + } + // setup authentication if provided + username, err := smtpConfig.Username.Get() + if err != nil { + s.Logger.Errorw("failed to get smtp username", "error", err) + return err + } + password, err := smtpConfig.Password.Get() + if err != nil { + s.Logger.Errorw("failed to get smtp password", "error", err) + return err + } + if un := username.String(); len(un) > 0 { + emailOptions = append( + emailOptions, + mail.WithUsername( + un, + ), + ) + if pw := password.String(); len(pw) > 0 { + emailOptions = append( + emailOptions, + mail.WithPassword( + pw, + ), + ) + } + } + // send mail + var mc *mail.Client + + // Try different authentication methods based on configuration + // If username is provided, use authentication; otherwise try without auth first + if un := username.String(); len(un) > 0 { + // Try CRAM-MD5 first when credentials are provided + emailOptionsCRAM5 := append(emailOptions, mail.WithSMTPAuth(mail.SMTPAuthCramMD5)) + mc, _ = mail.NewClient(smtpHost.String(), emailOptionsCRAM5...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(g, m) + + // Check if it's an authentication error and try PLAIN auth + if err != nil && (strings.Contains(err.Error(), "535 ") || + strings.Contains(err.Error(), "534 ") || + strings.Contains(err.Error(), "538 ") || + strings.Contains(err.Error(), "CRAM-MD5") || + strings.Contains(err.Error(), "authentication failed")) { + s.Logger.Warnw("CRAM-MD5 authentication failed, trying PLAIN auth", "error", err) + emailOptionsBasic := emailOptions + if build.Flags.Production { + emailOptionsBasic = append(emailOptions, mail.WithSMTPAuth(mail.SMTPAuthPlain)) + } + mc, _ = mail.NewClient(smtpHost.String(), emailOptionsBasic...) + if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(g, m) + } + } else { + // No credentials provided, try without authentication (e.g., local postfix) + mc, _ = mail.NewClient(smtpHost.String(), emailOptions...) 
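+ // NOTE: as in the authenticated branches above, the client constructor error is discarded;
+ // connection and delivery problems surface from DialAndSendWithContext below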
+ if build.Flags.Production { + mc.SetTLSPolicy(mail.TLSMandatory) + } else { + mc.SetTLSPolicy(mail.TLSOpportunistic) + } + err = mc.DialAndSendWithContext(g, m) + + // If no-auth fails and we get an auth-related error, log it appropriately + if err != nil && (strings.Contains(err.Error(), "530 ") || + strings.Contains(err.Error(), "535 ") || + strings.Contains(err.Error(), "authentication required") || + strings.Contains(err.Error(), "AUTH")) { + s.Logger.Warnw("Server requires authentication but no credentials provided", "error", err) + } + } + if err != nil { + s.Logger.Errorw("failed to send test email", "error", err) + if m.HasSendError() { + s.Logger.Errorw("failed to send test email", "error", m.SendError()) + return m.SendError() + } + return err + } + s.AuditLogAuthorized(ae) + + return nil +} + +// GetByNameAndCompanyID gets a SMTP configuration by name +func (s *SMTPConfiguration) GetByNameAndCompanyID( + ctx context.Context, + session *model.Session, + name *vo.String127, + companyID *uuid.UUID, // is nullable + options *repository.SMTPConfigurationOption, +) (*model.SMTPConfiguration, error) { + ae := NewAuditEvent("SMTPConfiguration.GetByNameAndCompanyID", session) + if name != nil { + ae.Details["name"] = name.String() + } + if companyID != nil { + ae.Details["companyId"] = companyID + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get + config, err := s.SMTPConfigurationRepository.GetByNameAndCompanyID( + ctx, + name, + companyID, + options, + ) + if err != nil { + s.Logger.Errorw("failed to get SMTP configuration", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + + return config, nil +} + +// UpdateByID updates a SMTP configuration by ID +func (s *SMTPConfiguration) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.SMTPConfiguration, +) error { + ae := NewAuditEvent("SMTPConfiguration.UpdateByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return err + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get current + current, err := s.SMTPConfigurationRepository.GetByID( + ctx, + id, + &repository.SMTPConfigurationOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + s.Logger.Errorw("SMTP configuration not found", "error", id) + return err + } + if err != nil { + s.Logger.Errorw("failed to update SMTP configuration", "error", err) + return err + } + // update config - if a field is present and not null, update it + if v, err := incoming.Name.Get(); err == nil { + var companyID *uuid.UUID + if cid, err := current.CompanyID.Get(); err == nil { + companyID = &cid + } + // check uniqueness + name := incoming.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + s.SMTPConfigurationRepository.DB, + "smtp_configurations", + name.String(), + companyID, + id, + ) + if err != nil { + s.Logger.Errorw("failed to check SMTP uniqueness", "error", err) + return err + } + if !isOK { + s.Logger.Debugw("smtp configuration name is already taken", "name", name.String()) + return 
validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + current.Name.Set(v) + + } + if v, err := incoming.Host.Get(); err == nil { + current.Host.Set(v) + } + if v, err := incoming.Port.Get(); err == nil { + current.Port.Set(v) + } + if v, err := incoming.Username.Get(); err == nil { + current.Username.Set(v) + } + if v, err := incoming.Password.Get(); err == nil { + current.Password.Set(v) + } + if v, err := incoming.IgnoreCertErrors.Get(); err == nil { + current.IgnoreCertErrors.Set(v) + } + if err := incoming.Validate(); err != nil { + s.Logger.Errorw("failed to update SMTP configuration", "error", err) + return err + } + // update + err = s.SMTPConfigurationRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + s.Logger.Errorw("failed to update SMTP configuration", "error", err) + return err + } + s.AuditLogAuthorized(ae) + + return nil +} + +// AddHeader adds a header to a SMTP configuration +func (s *SMTPConfiguration) AddHeader( + ctx context.Context, + session *model.Session, + smtpID *uuid.UUID, + header *model.SMTPHeader, +) (*uuid.UUID, error) { + ae := NewAuditEvent("SMTPConfiguration.AddHeader", session) + ae.Details["id"] = smtpID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // ensure config exists + _, err = s.SMTPConfigurationRepository.GetByID( + ctx, + smtpID, + &repository.SMTPConfigurationOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + s.Logger.Debugw("SMTP configuration not found", "error", smtpID) + return nil, errs.Wrap(err) + } + if err != nil { + s.Logger.Errorw("failed to add header to SMTP configuration", "error", err) + return nil, errs.Wrap(err) + } + header.SmtpID.Set(*smtpID) + // validate header + if err := header.Validate(); err != nil { + s.Logger.Errorw("failed to validate SMTP header", "error", err) + return nil, errs.Wrap(err) + } + // save header to configuration + headerID, err := s.SMTPConfigurationRepository.AddHeader( + ctx, + header, + ) + if err != nil { + s.Logger.Errorw("failed to add header to SMTP configuration", "error", err) + return nil, errs.Wrap(err) + } + s.AuditLogAuthorized(ae) + + return headerID, nil +} + +// RemoveHeader removes a header from a SMTP configuration +func (s *SMTPConfiguration) RemoveHeader( + ctx context.Context, + session *model.Session, + smtpID *uuid.UUID, + headerID *uuid.UUID, +) error { + ae := NewAuditEvent("SMTPConfiguration.RemoveHeader", session) + ae.Details["smtpId"] = smtpID.String() + ae.Details["headerId"] = headerID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return err + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get config, ensure config exists + _, err = s.SMTPConfigurationRepository.GetByID( + ctx, + smtpID, + &repository.SMTPConfigurationOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + s.Logger.Debugw("SMTP configuration not found", "error", smtpID) + return err + } + if err != nil { + s.Logger.Errorw("failed to remove header from SMTP configuration", "error", err) + return err + } + // remove header + err = s.SMTPConfigurationRepository.RemoveHeader( + ctx, 
+ headerID, + ) + if err != nil { + s.Logger.Errorw("failed to remove header from SMTP configuration", "error", err) + return err + } + s.AuditLogAuthorized(ae) + + return nil +} + +// DeleteByID deletes a SMTP configuration by ID +// including all headers attached to it +func (s *SMTPConfiguration) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("SMTPConfiguration.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return err + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // delete the relation from the campaign templates + err = s.CampaignTemplateService.RemoveSmtpBySmtpID( + ctx, + session, + id, + ) + if err != nil { + s.Logger.Errorw("failed to remove SMTP configuration relation from campaign templates", + "error", err, + ) + return err + } + // delete config + err = s.SMTPConfigurationRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + s.Logger.Errorw("failed to delete SMTP configuration", "error", err) + return err + } + s.AuditLogAuthorized(ae) + + return nil +} diff --git a/backend/service/sso.go b/backend/service/sso.go new file mode 100644 index 0000000..b8a185f --- /dev/null +++ b/backend/service/sso.go @@ -0,0 +1,261 @@ +package service + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential" + "github.com/gin-gonic/gin" + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/sso" + "github.com/phishingclub/phishingclub/vo" +) + +type SSO struct { + Common + OptionsService *Option + UserService *User + SessionService *Session + MSALClient *confidential.Client +} + +type MsGraphUserInfo struct { + DisplayName string `json:"displayName"` // Full name + Email string `json:"mail"` // Primary email + UserPrincipalName string `json:"userPrincipalName"` // Often email or login + GivenName string `json:"givenName"` // First name + Surname string `json:"surname"` // Last name + ID string `json:"id"` // Unique Azure AD ID +} + +// Get is the auth protected method for getting SSO details +func (s *SSO) Get( + ctx context.Context, + session *model.Session, +) (*model.SSOOption, error) { + ae := NewAuditEvent("SSO.Get", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + return s.GetSSOOptionWithoutAuth(ctx) +} + +// Upsert upserts SSO config it also replaces the in memory SSO configuration +func (s *SSO) Upsert( + ctx context.Context, + session *model.Session, + ssoOpt *model.SSOOption, +) error { + ae := NewAuditEvent("SSO.Upsert", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + 
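+	// SSO counts as enabled only when client ID, tenant ID and client secret are
+	// all present; a partial configuration is treated as disabled and its fields
+	// are cleared before persisting.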
ssoOpt.Enabled = len(ssoOpt.ClientID.String()) > 0 && + len(ssoOpt.TenantID.String()) > 0 && + len(ssoOpt.ClientSecret.String()) > 0 + + // if the config is incomplete, we clear it + if !ssoOpt.Enabled { + ssoOpt.ClientID = *vo.NewEmptyOptionalString64() + ssoOpt.TenantID = *vo.NewEmptyOptionalString64() + ssoOpt.ClientSecret = *vo.NewEmptyOptionalString1024() + ssoOpt.RedirectURL = *vo.NewEmptyOptionalString1024() + } + opt, err := ssoOpt.ToOption() + if err != nil { + return errs.Wrap(err) + } + err = s.OptionsService.SetOptionByKey(ctx, session, opt) + if err != nil { + s.Logger.Errorw("failed to upsert sso option", "error", err) + return errs.Wrap(err) + } + s.AuditLogAuthorized(ae) + // replace the in memory msal client + if ssoOpt.Enabled { + s.MSALClient, err = sso.NewEntreIDClient(ssoOpt) + if err != nil { + return errs.Wrap(err) + } + } else { + s.MSALClient = nil + } + + return nil +} + +func (s *SSO) GetSSOOptionWithoutAuth(ctx context.Context) (*model.SSOOption, error) { + opt, err := s.OptionsService.GetOptionWithoutAuth(ctx, data.OptionKeyAdminSSOLogin) + if err != nil { + s.Logger.Errorw("failed to get sso option", + "key", data.OptionKeyAdminSSOLogin, + "error", err) + return nil, errs.Wrap(err) + } + ssoOpt, err := model.NewSSOOptionFromJSON([]byte(opt.Value.String())) + if err != nil { + s.Logger.Errorw("failed to unmarshall sso option", "error", err) + return nil, errs.Wrap(err) + } + return ssoOpt, nil +} + +func (s *SSO) EntreIDLogin(ctx context.Context) (string, error) { + // check if sso is enabled + ssoOpt, err := s.GetSSOOptionWithoutAuth(ctx) + if err != nil { + return "", err + } + if !ssoOpt.Enabled { + s.Logger.Debugf("SSO login URL visited but it is disabed") + return "", errs.Wrap(errs.ErrSSODisabled) + } + // the MSALCLient is set on application start up + // and when a upsert is done, replacing the old client with new details + if s.MSALClient == nil { + return "", errs.Wrap(errors.New("no MSAL client")) + } + authURL, err := s.MSALClient.AuthCodeURL( + ctx, + ssoOpt.ClientID.String(), + ssoOpt.RedirectURL.String(), + []string{"https://graph.microsoft.com/User.Read"}, + ) + if err != nil { + return "", errs.Wrap(err) + } + return authURL, nil +} + +// EntreIDCallBack checks if the callback is OK then requests user details from the graph API +func (s *SSO) HandlEntraIDCallback( + g *gin.Context, + code string, +) (*model.Session, error) { + ssoOpt, err := s.GetSSOOptionWithoutAuth(g) + if err != nil { + return nil, err + } + if !ssoOpt.Enabled { + return nil, errs.Wrap(errs.ErrSSODisabled) + } + if s.MSALClient == nil { + return nil, errors.New("no msal client in memory") + } + result, err := s.MSALClient.AcquireTokenByAuthCode( + context.Background(), + code, + ssoOpt.RedirectURL.String(), + []string{"User.Read"}, + ) + if err != nil { + return nil, errs.Wrap(err) + } + userInfo, err := s.getMsGraphMe(g, result.AccessToken) + if err != nil { + s.Logger.Debugw("failed to get /me graph info", "error", err) + return nil, err + } + // validate required fields + if userInfo.Email == "" && userInfo.UserPrincipalName == "" { + err := errors.New("no email provided from SSO") + s.Logger.Debugw("no email or userPrincipalName from SSO", "error", err) + return nil, errs.Wrap(err) + } + // determine email (prefer mail over UPN) + email := userInfo.Email + if email == "" { + email = userInfo.UserPrincipalName + } + // determine name + name := userInfo.DisplayName + if name == "" { + name = strings.TrimSpace(fmt.Sprintf("%s %s", userInfo.GivenName, userInfo.Surname)) + } 
+ if name == "" { + // Fallback to email prefix if no name available + name = strings.Split(email, "@")[0] + } + userID, err := s.UserService.CreateFromSSO(g, name, email, userInfo.ID) + if err != nil { + return nil, errs.Wrap(err) + } + if userID == nil { + return nil, errs.Wrap(errors.New("user ID is unexpectedly nil")) + } + // get the user and create a session + user, err := s.UserService.GetByIDWithoutAuth(g, userID) + if err != nil { + s.Logger.Debugf("failed to get SSO user", "error", err) + return nil, errs.Wrap(err) + } + session, err := s.SessionService.Create(g, user, g.ClientIP()) + if err != nil { + s.Logger.Debugf("failed to create session from SSO", "error", err) + return nil, errs.Wrap(err) + } + return session, nil +} + +func (s *SSO) getMsGraphMe(ctx context.Context, accessToken string) (*MsGraphUserInfo, error) { + client := &http.Client{} + req, err := http.NewRequestWithContext(ctx, "GET", "https://graph.microsoft.com/v1.0/me", nil) + if err != nil { + return nil, errs.Wrap(err) + } + + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", accessToken)) + + resp, err := client.Do(req) + if err != nil { + return nil, errs.Wrap(err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, errs.Wrap(fmt.Errorf("graph API returned status %d", resp.StatusCode)) + } + + // Read and log raw response + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, errs.Wrap(err) + } + s.Logger.Debugw("Raw Microsoft Graph response", "body", string(body)) + + var userInfo MsGraphUserInfo + if err := json.Unmarshal(body, &userInfo); err != nil { + return nil, errs.Wrap(err) + } + + s.Logger.Debugw("Parsed user info", + "id", userInfo.ID, + "email", userInfo.Email, + "displayName", userInfo.DisplayName, + "userPrincipalName", userInfo.UserPrincipalName, + ) + + return &userInfo, nil +} diff --git a/backend/service/templateService.go b/backend/service/templateService.go new file mode 100644 index 0000000..b02268c --- /dev/null +++ b/backend/service/templateService.go @@ -0,0 +1,527 @@ +package service + +import ( + "bytes" + "fmt" + "html" + "html/template" + "io" + "math/rand" + "strings" + + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/utils" + "github.com/yeqown/go-qrcode/v2" +) + +const trackingPixelTemplate = "{{.Tracker}}" + +// TemplateService is for handling things related to +// templates such as websites, emails, etc. 
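+// Rendered templates receive the data map built in newTemplateDataMap, for
+// example {{.FirstName}}, {{.Email}}, {{.URL}}, {{.BaseURL}}, {{.Tracker}} and
+// the APISender custom fields, plus the helpers registered in TemplateFuncs.
+// An illustrative email body could therefore look like this (field names as
+// above, the layout itself is only a sketch):
+//
+//	<p>Hi {{.FirstName}} {{.LastName}},</p>
+//	<p><a href="{{.URL}}">Review your account</a></p>
+//	{{qr .URL}}
+//	{{.Tracker}}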
+type Template struct { + Common +} + +// CreateMailTemplate creates a new mail template +func (t *Template) CreateMail( + domainName string, + idKey string, + urlPath string, + campaignRecipient *model.CampaignRecipient, + email *model.Email, + apiSender *model.APISender, +) *map[string]any { + baseURL := "https://" + domainName + url := fmt.Sprintf( + "%s%s?%s=%s", + baseURL, + urlPath, + idKey, + campaignRecipient.ID.MustGet().String(), + ) + // set body + trackingPixelPath := fmt.Sprintf( + "%s/wf/open?upn=%s", + baseURL, + campaignRecipient.ID.MustGet().String(), + ) + trackingPixel := fmt.Sprintf( + "\"\"", + baseURL, + campaignRecipient.ID.MustGet().String(), + ) + // #nosec + trackingPixelMarkup := template.HTML(trackingPixel) + return t.newTemplateDataMap( + idKey, + baseURL, + url, + campaignRecipient.Recipient, + trackingPixelPath, + trackingPixelMarkup, + email, + apiSender, + ) +} + +// ApplyPageMock +func (t *Template) ApplyPageMock(content string) (*bytes.Buffer, error) { + // build response + domain := &database.Domain{ + Name: "example.test", + } + email := model.NewEmailExample() + campaignRecipientID := uuid.New() + recipient := model.NewRecipientExample() + urlIdentifier := &model.Identifier{ + Name: nullable.NewNullableWithValue( + "id", + ), + } + stateIdentifier := &model.Identifier{ + Name: nullable.NewNullableWithValue( + "state", + ), + } + campaignTemplate := &model.CampaignTemplate{ + URLIdentifier: urlIdentifier, + StateIdentifier: stateIdentifier, + } + return t.CreatePhishingPage( + domain, + email, + &campaignRecipientID, + recipient, + content, + campaignTemplate, + "stateParam", + "urlPath", + ) +} + +// CreateMailBody returns a rendered mail body to string +func (t *Template) CreateMailBody( + urlIdentifier string, + urlPath string, + domain *model.Domain, + campaignRecipient *model.CampaignRecipient, + email *model.Email, + apiSender *model.APISender, // can be nil +) (string, error) { + mailData := t.CreateMail( + domain.Name.MustGet().String(), + urlIdentifier, + urlPath, + campaignRecipient, + email, + apiSender, + ) + // parse and execute the mail content + mailContentTemplate := template.New("mailContent") + mailContentTemplate = mailContentTemplate.Funcs(TemplateFuncs()) + content, err := email.Content.Get() + if err != nil { + t.Logger.Errorw("failed to get email content", "error", err) + return "", errs.Wrap(err) + } + mailTemplate, err := mailContentTemplate.Parse(content.String()) + if err != nil { + t.Logger.Errorw("failed to parse body", "error", err) + return "", errs.Wrap(err) + } + var mailContent bytes.Buffer + if err := mailTemplate.Execute(&mailContent, mailData); err != nil { + t.Logger.Errorw("failed to execute mail template", "error", err) + return "", errs.Wrap(err) + } + (*mailData)["Content"] = mailContent.String() + var body bytes.Buffer + if err := mailContentTemplate.Execute(&body, mailData); err != nil { + t.Logger.Errorw("failed to execute body template", "error", err) + return "", errs.Wrap(err) + } + return body.String(), nil +} + +// CreatePhishingPage creates a new phishing page +func (t *Template) CreatePhishingPage( + domain *database.Domain, + email *model.Email, + campaignRecipientID *uuid.UUID, + recipient *model.Recipient, + contentToRender string, + campaignTemplate *model.CampaignTemplate, + stateParam string, + urlPath string, +) (*bytes.Buffer, error) { + w := bytes.NewBuffer([]byte{}) + id := campaignRecipientID.String() + baseURL := "https://" + domain.Name + if len(domain.Name) == 0 { + baseURL = "" + } + 
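+	// the landing URL built below carries the recipient ID under the configured
+	// URL identifier and the campaign state under the state identifier, e.g.
+	// https://<domain>?id=<recipient-id>&state=<state>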
urlIdentifier := campaignTemplate.URLIdentifier.Name.MustGet() + stateIdentifier := campaignTemplate.StateIdentifier.Name.MustGet() + url := fmt.Sprintf("%s?%s=%s&%s=%s", baseURL, urlIdentifier, id, stateIdentifier, stateParam) + tmpl, err := template.New("page"). + Funcs(TemplateFuncs()). + Parse(contentToRender) + + if err != nil { + return w, fmt.Errorf("failed to parse page template: %s", err) + } + data := t.newTemplateDataMap( + id, + baseURL, + url, + recipient, + "", // trackingPixelPath + "", // trackingPixelMarkup + email, + nil, // apiSender + ) + err = tmpl.Execute(w, data) + if err != nil { + return w, fmt.Errorf("failed to execute page template: %s", err) + } + return w, nil +} + +// newTemplateDataMap creates a new data map for the templates +func (t *Template) newTemplateDataMap( + id string, + baseURL string, + url string, + recipient *model.Recipient, + trackingPixelPath string, + trackingPixelMarkup template.HTML, + email *model.Email, + apiSender *model.APISender, +) *map[string]any { + recipientFirstName := "" + if v, err := recipient.FirstName.Get(); err == nil { + recipientFirstName = v.String() + } + recipientLastName := "" + if v, err := recipient.LastName.Get(); err == nil { + recipientLastName = v.String() + } + recipientEmail := "" + if v, err := recipient.Email.Get(); err == nil { + recipientEmail = v.String() + } + recipientPhone := "" + if v, err := recipient.Phone.Get(); err == nil { + recipientPhone = v.String() + } + recipientExtraIdentifier := "" + if v, err := recipient.ExtraIdentifier.Get(); err == nil { + recipientExtraIdentifier = v.String() + } + recipientPosition := "" + if v, err := recipient.Position.Get(); err == nil { + recipientPosition = v.String() + } + recipientDepartment := "" + if v, err := recipient.Department.Get(); err == nil { + recipientDepartment = v.String() + } + recipientCity := "" + if v, err := recipient.City.Get(); err == nil { + recipientCity = v.String() + } + recipientCountry := "" + if v, err := recipient.Country.Get(); err == nil { + recipientCountry = v.String() + } + recipientMisc := "" + if v, err := recipient.Misc.Get(); err == nil { + recipientMisc = v.String() + } + mailHeaderFrom := "" + if v, err := email.MailHeaderFrom.Get(); err == nil { + mailHeaderFrom = v.String() + } + m := map[string]any{ + "rID": id, + "FirstName": recipientFirstName, + "LastName": recipientLastName, + "Email": recipientEmail, + "To": recipientEmail, // alias of Email + "Phone": recipientPhone, + "ExtraIdentifier": recipientExtraIdentifier, + "Position": recipientPosition, + "Department": recipientDepartment, + "City": recipientCity, + "Country": recipientCountry, + "Misc": recipientMisc, + "Tracker": trackingPixelMarkup, + "TrackingURL": trackingPixelPath, + // sender fields + "From": mailHeaderFrom, + // general fields + "BaseURL": baseURL, + "URL": url, + "APIKey": "", + "CustomField1": "", + "CustomField2": "", + "CustomField3": "", + "CustomField4": "", + } + if apiSender != nil { + m["APIKey"] = utils.NullableToString(apiSender.APIKey) + m["CustomField1"] = utils.NullableToString(apiSender.CustomField1) + m["CustomField2"] = utils.NullableToString(apiSender.CustomField2) + m["CustomField3"] = utils.NullableToString(apiSender.CustomField3) + m["CustomField4"] = utils.NullableToString(apiSender.CustomField4) + } + + return &m +} + +func (t *Template) AddTrackingPixel(content string) string { + if strings.Contains(content, trackingPixelTemplate) { + return content + } + + // handle empty or whitespace-only content + content = 
strings.TrimSpace(content) + if content == "" { + return content + } + + // If just plain text without any HTML, append + if !strings.Contains(content, "<") { + return content + trackingPixelTemplate + } + + // find the first main container tag (like div), case insensitive + startDiv := -1 + lowerContent := strings.ToLower(content) + if idx := strings.Index(lowerContent, " 0 && content[pos-1] != '\\' { + inQuote = false + quoteChar = 0 + } + } + pos++ + continue + } + + // skip everything if we're in a quote + if inQuote { + pos++ + continue + } + + if pos+4 <= len(content) && content[pos:pos+4] == "" { + inComment = false + pos += 3 + continue + } + if inComment { + pos++ + continue + } + + // case insensitive check for script and style + if pos+7 <= len(content) && strings.ToLower(content[pos:pos+7]) == "" { + inScript = false + } + if pos+6 <= len(content) && strings.ToLower(content[pos:pos+6]) == "" { + inStyle = false + } + + if inScript || inStyle { + pos++ + continue + } + + // case insensitive check for div tags + if pos+4 <= len(content) && strings.ToLower(content[pos:pos+4]) == "' { + tagLevel++ + break + } + } + } + if pos+6 <= len(content) && strings.ToLower(content[pos:pos+6]) == "" { + tagLevel-- + if tagLevel == 0 { + // Found the matching closing tag + return content[:pos] + trackingPixelTemplate + content[pos:] + } + } + pos++ + } + + // couldn't find a proper place, append + return content + trackingPixelTemplate +} + +func (t *Template) RemoveTrackingPixelFromContent(content string) string { + return strings.ReplaceAll(content, trackingPixelTemplate, "") +} + +func TemplateFuncs() template.FuncMap { + return template.FuncMap{ + "urlEscape": func(s string) string { + return template.URLQueryEscaper(s) + }, + "htmlEscape": func(s string) string { + return html.EscapeString(s) + }, + "randInt": func(n1, n2 int) (int, error) { + if n1 > n2 { + return 0, fmt.Errorf("first number must be less than or equal to second number") + } + // #nosec + return rand.Intn(n2-n1+1) + n1, nil + }, + "randAlpha": RandAlpha, + "qr": GenerateQRCode, + } +} + +func GenerateQRCode(args ...any) (template.HTML, error) { + if len(args) == 0 { + return "", errors.New("URL is required") + } + + url, ok := args[0].(string) + if !ok { + return "", errors.New("first argument must be a URL string") + } + + dotSize := 5 + if len(args) > 1 { + if size, ok := args[1].(int); ok && size > 0 { + dotSize = size + } + } + + var buf bytes.Buffer + qr, err := qrcode.New(url) + if err != nil { + return "", err + } + + writer := NewQRHTMLWriter(&buf, dotSize) + if err := qr.Save(writer); err != nil { + return "", err + } + // #nosec + return template.HTML(buf.String()), nil +} + +const alphaChar = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + +// RandAlpha returns a random string of the given length +func RandAlpha(length int) (string, error) { + if length > 32 { + return "", fmt.Errorf("length must be less than 32") + } + b := make([]byte, length) + for i := range b { + // #nosec + b[i] = alphaChar[rand.Intn(len(alphaChar))] + } + return string(b), nil +} + +type QRHTMLWriter struct { + w io.Writer + dotSize int +} + +func NewQRHTMLWriter(w io.Writer, dotSize int) *QRHTMLWriter { + if dotSize <= 0 { + dotSize = 10 + } + return &QRHTMLWriter{ + w: w, + dotSize: dotSize, + } +} + +func (q *QRHTMLWriter) Write(mat qrcode.Matrix) error { + if q.w == nil { + return errors.New("QR writer: writer not initialized") + } + + if _, err := fmt.Fprint(q.w, ``); err != nil { + return fmt.Errorf("failed to write table 
opening: %w", err) + } + + maxW := mat.Width() - 1 + mat.Iterate(qrcode.IterDirection_ROW, func(x, y int, v qrcode.QRValue) { + if x == 0 { + fmt.Fprint(q.w, "") + } + + color := "#FFFFFF" + if v.IsSet() { + color = "#000000" + } + + fmt.Fprintf(q.w, ``, + q.dotSize, q.dotSize, color, q.dotSize, q.dotSize, q.dotSize, q.dotSize) + + if x == maxW { + fmt.Fprint(q.w, "") + } + }) + + if _, err := fmt.Fprint(q.w, "
"); err != nil { + return fmt.Errorf("QR writer: failed to write table closing: %w", err) + } + + return nil +} + +func (q *QRHTMLWriter) Close() error { + if closer, ok := q.w.(io.Closer); ok { + return closer.Close() + } + return nil +} diff --git a/backend/service/update.go b/backend/service/update.go new file mode 100644 index 0000000..b720aca --- /dev/null +++ b/backend/service/update.go @@ -0,0 +1,536 @@ +package service + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "context" + "crypto/ed25519" + "crypto/tls" + "encoding/json" + "io" + "net/http" + "os" + "path/filepath" + "strings" + "sync" + "syscall" + "time" + + "github.com/go-errors/errors" + "golang.org/x/mod/semver" + + "github.com/phishingclub/phishingclub/build" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/embedded" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/version" +) + +type Update struct { + Common + OptionService *Option + updateMutex sync.Mutex +} + +// GitHubRelease represents a GitHub release response +type GitHubRelease struct { + TagName string `json:"tag_name"` + Name string `json:"name"` + Assets []struct { + Name string `json:"name"` + BrowserDownloadURL string `json:"browser_download_url"` + } `json:"assets"` +} + +// UpdateDetails represents update information +type UpdateDetails struct { + LatestVersion string `json:"latestVersion"` + DownloadURL string `json:"downloadUrl"` + Message string `json:"message"` +} + +// CheckForUpdate returns if an update is ready and if installation +// supports the update to be performed from the application +func (u *Update) CheckForUpdate( + ctx context.Context, + session *model.Session, +) (bool, bool, error) { + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + u.LogAuthError(err) + return false, false, errs.Wrap(err) + } + if !isAuthorized { + // skip audit logging on this endpoint + return false, false, errors.New("unauthorized") + } + + // Check GitHub for latest release + updateAvailable, err := u.checkGitHubForUpdate() + if err != nil { + return false, false, errs.Wrap(err) + } + + // Check if using systemd (for update capability) + usingSystemd, err := u.OptionService.GetOption(ctx, session, data.OptionKeyUsingSystemd) + if err != nil { + return false, false, errs.Wrap(err) + } + + return updateAvailable, usingSystemd.Value.String() == data.OptionValueUsingSystemdYes, nil +} + +// CheckForUpdateCached returns if an update is ready based on cached data +// and if installation supports the update to be performed from the application +func (u *Update) CheckForUpdateCached( + ctx context.Context, + session *model.Session, +) (bool, bool, error) { + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + u.LogAuthError(err) + return false, false, errs.Wrap(err) + } + if !isAuthorized { + // skip audit logging on this endpoint + return false, false, errors.New("unauthorized") + } + + // Get cached update availability + updateAvailable := cache.IsUpdateAvailable() + + // Check if using systemd (for update capability) + usingSystemd, err := u.OptionService.GetOption(ctx, session, data.OptionKeyUsingSystemd) + if err != nil { + return false, false, errs.Wrap(err) + } + + return updateAvailable, usingSystemd.Value.String() == data.OptionValueUsingSystemdYes, nil +} + 
+// checkGitHubForUpdate checks GitHub releases API for newer version +func (u *Update) checkGitHubForUpdate() (bool, error) { + client := &http.Client{ + Timeout: 10 * time.Second, + } + + if !build.Flags.Production { + customTransport := &http.Transport{ + TLSClientConfig: &tls.Config{ + // #nosec + InsecureSkipVerify: true, + }, + } + client.Transport = customTransport + } + + req, err := http.NewRequest( + http.MethodGet, + "https://api.github.com/repos/phishingclub/phishingclub/releases/latest", + nil, + ) + if err != nil { + return false, errs.Wrap(err) + } + + req.Header.Set("Accept", "application/vnd.github.v3+json") + req.Header.Set("User-Agent", "PhishingClub-Client") + + resp, err := client.Do(req) + if err != nil { + return false, errs.Wrap(err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return false, errors.New("unexpected response from GitHub API") + } + + var release GitHubRelease + if err := json.NewDecoder(resp.Body).Decode(&release); err != nil { + return false, errs.Wrap(err) + } + + currentVersion := version.GetSemver() + latestVersion := release.TagName + + // Compare versions + isNewer, err := u.CheckUpdateVersion(currentVersion, latestVersion) + if err != nil { + u.Logger.Errorw("version comparison failed", "error", err) + return false, nil // Don't fail on version comparison error + } + + // Cache the update availability + cache.SetUpdateAvailable(isNewer) + + u.Logger.Debugw("update check completed", + "current", currentVersion, + "latest", latestVersion, + "updateAvailable", isNewer) + + return isNewer, nil +} + +func (u *Update) GetUpdateDetails( + ctx context.Context, + session *model.Session, +) (*UpdateDetails, error) { + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + u.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + // skip audit logging on this endpoint + return nil, errors.New("unauthorized") + } + + // Get latest release info from GitHub + client := &http.Client{ + Timeout: 10 * time.Second, + } + + if !build.Flags.Production { + customTransport := &http.Transport{ + TLSClientConfig: &tls.Config{ + // #nosec + InsecureSkipVerify: true, + }, + } + client.Transport = customTransport + } + + req, err := http.NewRequest( + http.MethodGet, + "https://api.github.com/repos/phishingclub/phishingclub/releases/latest", + nil, + ) + if err != nil { + return nil, errs.Wrap(err) + } + + req.Header.Set("Accept", "application/vnd.github.v3+json") + req.Header.Set("User-Agent", "PhishingClub-Client") + + resp, err := client.Do(req) + if err != nil { + return nil, errs.Wrap(err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, errors.New("unexpected response from GitHub API") + } + + var release GitHubRelease + if err := json.NewDecoder(resp.Body).Decode(&release); err != nil { + return nil, errs.Wrap(err) + } + + currentVersion := version.GetSemver() + isNewer, _ := u.CheckUpdateVersion(currentVersion, release.TagName) + + if !isNewer { + return nil, errs.ErrNoUpdateAvailable + } + + // Find the binary asset + var downloadURL string + for _, asset := range release.Assets { + if strings.Contains(asset.Name, ".tar.gz") { + downloadURL = asset.BrowserDownloadURL + break + } + } + + if downloadURL == "" { + return nil, errors.New("no downloadable binary found in latest release") + } + + return &UpdateDetails{ + LatestVersion: release.TagName, + DownloadURL: downloadURL, + Message: "Update available from GitHub releases", + }, nil +} + +// 
RunUpdate runs a software update +func (u *Update) RunUpdate( + ctx context.Context, + session *model.Session, +) error { + // Prevent concurrent updates + u.updateMutex.Lock() + defer u.updateMutex.Unlock() + + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + u.LogAuthError(err) + return errs.Wrap(err) + } + if !isAuthorized { + // skip audit logging on this endpoint + return errors.New("unauthorized") + } + + // Get update details + details, err := u.GetUpdateDetails(ctx, session) + if err != nil { + u.Logger.Errorw("failed to get update details", "error", err) + return errs.Wrap(err) + } + + // Download release from GitHub + req, err := http.NewRequest( + http.MethodGet, + details.DownloadURL, + nil, + ) + if err != nil { + u.Logger.Errorw("failed to create download request", "error", err) + return errs.Wrap(err) + } + + req.Header.Set("User-Agent", "PhishingClub-Client") + + client := &http.Client{ + Timeout: 60 * time.Second, // Longer timeout for downloads + } + + if !build.Flags.Production { + customTransport := &http.Transport{ + TLSClientConfig: &tls.Config{ + // #nosec + InsecureSkipVerify: true, + }, + } + client.Transport = customTransport + } + + resp, err := client.Do(req) + if err != nil { + u.Logger.Errorw("failed to download update", "error", err) + return errs.Wrap(err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + u.Logger.Errorw("unexpected response from downloading update", "statusCode", resp.StatusCode) + return errors.New("unexpected response from GitHub") + } + + currentVersion := version.GetSemver() + // Check that the version is greater than the current + // to protect against downgrade attacks + isNewer, err := u.CheckUpdateVersion(currentVersion, details.LatestVersion) + if err != nil { + u.Logger.Errorf("version comparison failed", "error", err) + // You might still want to proceed with the update + } else if !isNewer { + u.Logger.Infow("update is not newer than current version", + "current", currentVersion, + "latest", details.LatestVersion) + return errors.New("update version is not newer than current") + } + + // Get current executable path + execPath, err := os.Executable() + if err != nil { + u.Logger.Errorw("failed to get current executable path", "error", err) + return errs.Wrap(err) + } + execDir := filepath.Dir(execPath) + + // Get original permissions + fileInfo, err := os.Stat(execPath) + if err != nil { + u.Logger.Errorw("failed to get original file permissions", "error", err) + return errs.Wrap(err) + } + originalMode := fileInfo.Mode() + + // Create temp directory in same filesystem as executable + tmpDir, err := os.MkdirTemp(execDir, ".update-*") + if err != nil { + return errs.Wrap(err) + } + defer os.RemoveAll(tmpDir) + + // Extract and verify the update package + binaryPath, err := u.extractAndVerifyPackage(resp.Body, tmpDir) + if err != nil { + u.Logger.Errorw("failed to verify update package", "error", err) + return errs.Wrap(err) + } + + // if not production, we stop the upgrade process here + if !build.Flags.Production { + u.Logger.Infow("update verification successful (development mode - not installing)") + return nil + } + + // Create backup with atomic rename (same filesystem) + backupPath := execPath + ".bak" + if err := os.Rename(execPath, backupPath); err != nil { + return errs.Wrap(err) + } + + // Move new binary with atomic rename (same filesystem) + if err := os.Rename(binaryPath, execPath); err != nil { + // Restore from backup on failure + 
os.Rename(backupPath, execPath) + return errs.Wrap(err) + } + + // After rename, set the same permissions as the original executable + if err := os.Chmod(execPath, originalMode); err != nil { + u.Logger.Errorw("failed to set original permissions", "error", err) + return errs.Wrap(err) + } + + u.Logger.Infow("update completed successfully", "version", details.LatestVersion) + + // Schedule shutdown after a brief delay to allow HTTP response to be sent + go func() { + time.Sleep(1 * time.Second) + u.Logger.Infow("initiating shutdown after update") + pid := os.Getpid() + if process, err := os.FindProcess(pid); err == nil { + process.Signal(syscall.SIGTERM) + } + }() + + return nil +} + +// extractAndVerifyPackage extracts binary and signature from tar.gz and verifies the signature +func (u *Update) extractAndVerifyPackage(packageData io.Reader, tmpDir string) (string, error) { + // Read the entire package + var buf bytes.Buffer + if _, err := io.Copy(&buf, packageData); err != nil { + return "", errs.Wrap(err) + } + + // Create gzip reader + gzipReader, err := gzip.NewReader(bytes.NewReader(buf.Bytes())) + if err != nil { + return "", errs.Wrap(err) + } + defer gzipReader.Close() + + // Create tar reader + tarReader := tar.NewReader(gzipReader) + + var binaryPath, sigPath string + + // Extract files from archive + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + return "", errs.Wrap(err) + } + + // Skip directories + if header.Typeflag != tar.TypeReg { + continue + } + + // Get the base filename + fileName := filepath.Base(header.Name) + outputPath := filepath.Join(tmpDir, fileName) + + // Create output file + outFile, err := os.Create(outputPath) + if err != nil { + return "", errs.Wrap(err) + } + + // Copy file content + if _, err := io.Copy(outFile, tarReader); err != nil { + outFile.Close() + return "", errs.Wrap(err) + } + outFile.Close() + + // Save paths based on file extension + if filepath.Ext(fileName) == ".sig" { + sigPath = outputPath + } else if fileName == "phishingclub" { + binaryPath = outputPath + } + } + + // Ensure we have both binary and signature + if binaryPath == "" || sigPath == "" { + return "", errors.New("update package is incomplete: missing binary or signature") + } + + // Verify the signature + if err := u.verifySignature(binaryPath, sigPath); err != nil { + return "", errs.Wrap(err) + } + + return binaryPath, nil +} + +// verifySignature verifies binary using Ed25519 +func (u *Update) verifySignature(binaryPath, sigPath string) error { + // Load binary data + binaryData, err := os.ReadFile(binaryPath) + if err != nil { + return errs.Wrap(err) + } + + // Load signature + signature, err := os.ReadFile(sigPath) + if err != nil { + return errs.Wrap(err) + } + + // Verify using Ed25519 + if !ed25519.Verify(embedded.SigningKey1, binaryData, signature) { + u.Logger.Infow("failed to verify update - trying backup") + if !ed25519.Verify(embedded.SigningKey2, binaryData, signature) { + return errors.New("signature verification failed") + } + } + + return nil +} + +func (u *Update) CheckUpdateVersion(currentVersion, latestVersion string) (bool, error) { + // The semver package expects versions to be prefixed with 'v' + current := ensureVPrefix(currentVersion) + latest := ensureVPrefix(latestVersion) + + // Validate versions + if !semver.IsValid(current) { + u.Logger.Errorw("invalid current version format", "version", currentVersion) + return false, errs.Wrap(errors.New("invalid current version format")) + } + + if 
!semver.IsValid(latest) { + u.Logger.Errorw("invalid latest version format", "version", latestVersion) + return false, errs.Wrap(errors.New("invalid latest version format")) + } + + // Compare - returns > 0 if latest is greater + return semver.Compare(latest, current) > 0, nil +} + +// ensureVPrefix ensures the version string has a 'v' prefix +func ensureVPrefix(version string) string { + if !strings.HasPrefix(version, "v") { + return "v" + version + } + return version +} diff --git a/backend/service/user.go b/backend/service/user.go new file mode 100644 index 0000000..329ec6a --- /dev/null +++ b/backend/service/user.go @@ -0,0 +1,1117 @@ +package service + +import ( + "context" + "crypto/sha256" + "crypto/subtle" + "fmt" + "strings" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/password" + "github.com/phishingclub/phishingclub/random" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "github.com/pquerna/otp" + "github.com/pquerna/otp/totp" + "gorm.io/gorm" +) + +// TOTPValues is TOTP related values +type TOTPValues struct { + Secret string + URL string + RecoveryCode string +} + +// User is a service for User +type User struct { + Common + UserRepository *repository.User + RoleRepository *repository.Role + CompanyRepository *repository.Company + PasswordVerifier *password.Argon2Verifier + PasswordHasher *password.Argon2Hasher +} + +// Create creates a new user +func (u *User) Create( + ctx context.Context, + session *model.Session, + newUser *model.UserUpsertRequest, +) (*uuid.UUID, error) { + ae := NewAuditEvent("User.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // check if the username is already taken + _, err = u.UserRepository.GetByUsername( + ctx, + &newUser.Username, + &repository.UserOption{}, + ) + // if there is not record not found error, then thw username is already user + if err == nil { + u.Logger.Debugw("username is already taken", "username", newUser.Username.String()) + return nil, validate.WrapErrorWithField(errors.New("not unique"), "username") + } + if !errors.Is(err, gorm.ErrRecordNotFound) { + u.Logger.Errorw("failed to create user - failed to get user by username", "error", err) + return nil, errs.Wrap(err) + } + // check if the email is already taken + _, err = u.UserRepository.GetByEmail( + ctx, + &newUser.Email, + &repository.UserOption{}, + ) + if err == nil { + u.Logger.Debugw("email is already taken", "email", newUser.Email.String()) + + return nil, validate.WrapErrorWithField(errors.New("not unique"), "email") + } + if !errors.Is(err, gorm.ErrRecordNotFound) { + u.Logger.Errorw("failed to create user - failed to get user by email", "error", err) + return nil, errs.Wrap(err) + } + adminRole, err := u.RoleRepository.GetByName( + ctx, + data.RoleSuperAdministrator, + ) + if err != nil { + return nil, errs.Wrap(err) + } + // create + user := model.User{ + Username: nullable.NewNullableWithValue(newUser.Username), + Email: 
nullable.NewNullableWithValue(newUser.Email), + Name: nullable.NewNullableWithValue(newUser.Fullname), + RoleID: nullable.NewNullableWithValue(adminRole.ID), + } + passwdHash, err := u.PasswordHasher.Hash(newUser.Password.String()) + if err != nil { + u.Logger.Errorw("failed to create user - failed to hash password", "error", err) + return nil, errs.Wrap(err) + } + // validate + if err := user.Validate(); err != nil { + u.Logger.Debugw("failed to create user - failed to validate user", "error", err) + return nil, errs.Wrap(err) + } + // save the user + id, err := u.UserRepository.Insert( + ctx, + &user, + passwdHash, + "", + ) + if err != nil { + u.Logger.Errorw("failed to create user - failed to save user", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + u.AuditLogAuthorized(ae) + + return id, nil +} + +// CreateFromSSO create a users from SSO login flow +// if the user already exists it returns the ID +func (u *User) CreateFromSSO( + ctx context.Context, + name string, + email string, + externalID string, +) (*uuid.UUID, error) { + ae := NewAuditEvent("User.SSOCreate", nil) // TODO could be a system session + // check if user already exists by email + emailVO, err := vo.NewEmail(email) + if err != nil { + u.Logger.Debugw("failed to setup SSO user", "error", err) + return nil, errs.Wrap(err) + } + existingUser, err := u.UserRepository.GetByEmail( + ctx, + emailVO, + &repository.UserOption{}, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + u.Logger.Debugw("failed to setup SSO user: DB error", "error", err) + return nil, errs.Wrap(err) + } + if existingUser != nil { + // update the user to SSO by removing the password hash + // if they dont have a SSO id + ssoID, err := existingUser.SSOID.Get() + if err != nil { + u.Logger.Errorf("failed to update user to SSO", "error", err) + } + if len(ssoID) == 0 { + uid := existingUser.ID.MustGet() + err := u.UserRepository.UpdateUserToSSO(ctx, &uid, externalID) + if err != nil { + u.Logger.Errorf("failed to update user to SSO", "error", err) + return nil, errs.Wrap(err) + } + } + // User exists, return their ID + id := existingUser.ID.MustGet() + return &id, nil + } + // create username from email (part before @) + username := strings.Split(email, "@")[0] + // trim the username for non alpha numeric + // trim username for non alpha numeric characters + username = strings.Map( + func(r rune) rune { + if r >= 'a' && r <= 'z' || + r >= 'A' && r <= 'Z' || + r >= '0' && r <= '9' { + return r + } + return -1 + }, + username, + ) + usernameVO, err := vo.NewUsername(username) + if err != nil { + u.Logger.Debugw("failed to setup SSO user: username error", "error", err) + return nil, errs.Wrap(err) + } + // check if username exists, append a random string + count := 1 + baseUsername := username + for { + _, err := u.UserRepository.GetByUsername( + ctx, + usernameVO, + &repository.UserOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + break + } + ri, err := random.RandomIntN(4) + if err != nil { + u.Logger.Debugw("failed to setup SSO user: rand gen error", "error", err) + return nil, errs.Wrap(err) + } + usernameVO, err = vo.NewUsername(fmt.Sprintf("%s%d", baseUsername, ri)) + if err != nil { + return nil, errs.Wrap(err) + } + if count > 3 { + err := errors.New("too many attempts at creating username") + u.Logger.Debugw("failed to setup SSO user: username error", "error", err) + return nil, errs.Wrap(err) + } + count++ + } + nameVO, err := vo.NewUserFullname(name) + if err != nil { + return nil, 
errs.Wrap(err) + } + // generate a random password + /* + passwd, err := vo.NewReasonableLengthPasswordGenerated() + if err != nil { + u.Logger.Debugw("failed to setup SSO user: password generation", "error", err) + return nil, errs.Wrap(err) + } + hash, err := u.PasswordHasher.Hash(passwd.String()) + */ + // get role + adminRole, err := u.RoleRepository.GetByName( + ctx, + data.RoleSuperAdministrator, + ) + if err != nil { + return nil, errs.Wrap(err) + } + // create new user + user := model.User{ + Username: nullable.NewNullableWithValue(*usernameVO), + Email: nullable.NewNullableWithValue(*emailVO), + Name: nullable.NewNullableWithValue(*nameVO), + // Set default role - you might want to configure this + RoleID: nullable.NewNullableWithValue(adminRole.ID), + } + // insert user + id, err := u.UserRepository.Insert( + ctx, + &user, + "", //empty hash for MFA users + externalID, + ) + if err != nil { + return nil, errs.Wrap(err) + } + u.AuditLogAuthorized(ae) + + return id, nil +} + +// GetMaskedAPIKey gets a masked API user key +func (u *User) GetMaskedAPIKey( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, +) (string, error) { + ae := NewAuditEvent("User.GetMaskedAPIKey", session) + ae.Details["userId"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return "", errs.Wrap(err) + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return "", errs.ErrAuthorizationFailed + } + // get api key + apiKey, err := u.UserRepository.GetAPIKey(ctx, userID) + if err != nil { + u.Logger.Errorw("failed to get api key", "error", err) + return "", errs.Wrap(err) + } + masked := "" + if len(apiKey) > 4 { + masked = apiKey[0:4] + strings.Repeat("*", 28) + } + // no audit on read + + return masked, nil +} + +// GetAllAPIKeys gets all api keys as SHA256 +// THIS METHOD DOES NOT HAVE AUTH, USE WITH DISCRETION +func (s *User) GetAllAPIKeysSHA256( + ctx context.Context, +) ([]*model.APIUser, error) { + apiUsers := []*model.APIUser{} + // get api key + apiKeyAndIDMap, err := s.UserRepository.GetAllAPIKeys(ctx) + for apiKey, userID := range apiKeyAndIDMap { + hash := sha256.Sum256([]byte(apiKey)) + apiUsers = append( + apiUsers, + &model.APIUser{ + ID: userID, + APIKeyHash: hash, + }, + ) + } + if err != nil { + s.Logger.Errorw("failed to get all api keys", "error", err) + return apiUsers, errs.Wrap(err) + } + return apiUsers, nil +} + +// UpsertAPIKey creates/updates a user API key +func (s *User) UpsertAPIKey( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, +) (string, error) { + ae := NewAuditEvent("User.UpsertAPIKey", session) + ae.Details["userId"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return "", errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return "", errs.ErrAuthorizationFailed + } + key, err := random.GenerateRandomURLBase64Encoded(64) + if err != nil { + s.Logger.Errorw("failed to create api key - bad crypto", "error", err) + return "", errs.Wrap(err) + } + // upsert api key + err = s.UserRepository.UpsertAPIKey( + ctx, + userID, + key, + ) + if err != nil { + s.Logger.Errorw("failed set api key", "error", err) + return "", errs.Wrap(err) + } + s.AuditLogAuthorized(ae) + + return key, nil +} + +// RemoveAPIKey 
removes a users api key +func (u *User) RemoveAPIKey( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, +) error { + ae := NewAuditEvent("User.RemoveAPIKey", session) + ae.Details["userId"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return err + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + err = u.UserRepository.RemoveAPIKey(ctx, userID) + if err != nil { + u.Logger.Errorw("failed to remove api key", "error", err) + return err + } + u.AuditLogAuthorized(ae) + + return nil +} + +// UpdateByID updates a user by ID +// values to update are email, username and fullname +func (u *User) Update( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, + incoming *model.User, +) error { + ae := NewAuditEvent("User.Update", session) + ae.Details["id"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return err + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get user to be updated + current, err := u.UserRepository.GetByID(ctx, userID, &repository.UserOption{}) + if err != nil { + u.Logger.Errorw("failed to update user - failed to get user by id", "error", err) + return err + } + // check if the username is already taken + if username, err := incoming.Username.Get(); err == nil { + _, err = u.UserRepository.GetByUsername( + ctx, + &username, + &repository.UserOption{}, + ) + if err == nil && current.Username.MustGet().String() != username.String() { + u.Logger.Debugw("username is already taken", "username", username.String()) + return validate.WrapErrorWithField(errors.New("not unique"), "username") + } + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + u.Logger.Errorw("failed to update user - failed to get user by username", "error", err) + return err + } + } + // check if the is already taken + if email, err := incoming.Email.Get(); err == nil { + // check if the email is already taken + _, err = u.UserRepository.GetByEmail( + ctx, + &email, + &repository.UserOption{}, + ) + if err == nil && current.Email.MustGet().String() != incoming.Email.MustGet().String() { + u.Logger.Debugw("email is already taken", "email", email.String()) + return validate.WrapErrorWithField(errors.New("not unique"), "email") + } + + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + u.Logger.Errorw("failed to update user - failed to get user by email", "error", err) + return err + } + } + if v, err := incoming.Email.Get(); err == nil { + current.Email.Set(v) + } + if v, err := incoming.Username.Get(); err == nil { + current.Username.Set(v) + } + if v, err := incoming.Name.Get(); err == nil { + current.Name.Set(v) + } + // validate + if err := current.Validate(); err != nil { + u.Logger.Debugw("failed to update user - failed to validate user", "error", err) + return err + } + // update the user + err = u.UserRepository.UpdateByID( + ctx, + userID, + current, + ) + if err != nil { + u.Logger.Errorw("failed to update user - failed to update user", "error", err) + return err + } + u.AuditLogAuthorized(ae) + + return nil +} + +// GetAll gets all users +func (u *User) GetAll( + ctx context.Context, + session *model.Session, + options *repository.UserOption, 
+) (*model.Result[model.User], error) { + result := model.NewEmptyResult[model.User]() + ae := NewAuditEvent("User.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = u.UserRepository.GetAll(ctx, options) + if err != nil { + u.Logger.Errorw("failed to get all users - failed to get all users", "error", err) + return result, errs.Wrap(err) + } + // no audit on read + return result, nil +} + +// GetByID gets a user by ID +func (s *User) GetByID( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, +) (*model.User, error) { + ae := NewAuditEvent("User.GetByID", session) + ae.Details["id"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + s.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + s.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + user, err := s.UserRepository.GetByID( + ctx, + userID, + &repository.UserOption{ + WithRole: true, + WithCompany: true, + }, + ) + if err != nil { + s.Logger.Errorw("failed to get user by id - failed to get user by id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + + return user, nil +} + +// GetByIDWithoutAuth gets a user by ID without requiring auth +func (s *User) GetByIDWithoutAuth( + ctx context.Context, + userID *uuid.UUID, +) (*model.User, error) { + user, err := s.UserRepository.GetByID( + ctx, + userID, + &repository.UserOption{ + WithRole: true, + WithCompany: true, + }, + ) + if err != nil { + s.Logger.Errorw("failed to get user by id - failed to get user by id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + + return user, nil +} + +// Delete deletes a user +func (u *User) Delete( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, +) error { + ae := NewAuditEvent("User.Delete", session) + ae.Details["id"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return err + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + if session.User.ID.MustGet().String() == userID.String() { + u.Logger.Debugw("Attempted to delete own user", "userID", userID.String()) + return errs.NewValidationError( + errors.New("Can not delete own user"), + ) + } + // delete the user + err = u.UserRepository.DeleteByID( + ctx, + userID, + ) + if err != nil { + u.Logger.Errorw("failed to delete user - failed to delete user", "error", err) + return err + } + u.AuditLogAuthorized(ae) + + return nil +} + +// SetupTOTP sets up TOTP for a user +// returns secret, url and error +func (u *User) SetupTOTP( + ctx context.Context, + session *model.Session, + password *vo.ReasonableLengthPassword, +) (*TOTPValues, error) { + ae := NewAuditEvent("User.SetupTOTP", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + 
u.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // check if the user is loaded in the session + user := session.User + if user == nil { + u.Logger.Error("user is not loaded in session") + return nil, errors.New("user is not loaded in session") + } + // check password + username := user.Username.MustGet() + hasValidPassword, err := u.CheckPassword( + ctx, + &username, + password, + ) + if err != nil || !hasValidPassword { + return nil, errs.ErrAuthenticationFailed + } + // generate OTP + email := user.Email.MustGet() + key, err := totp.Generate(totp.GenerateOpts{ + Issuer: "Phishing Club", + AccountName: email.String(), + Algorithm: otp.AlgorithmSHA1, + }) + if err != nil { + u.Logger.Errorw("failed to setup TOTP - failed to generate key", "error", err) + return nil, errs.Wrap(err) + } + rc, err := random.GenerateRandomURLBase64Encoded(24) + if err != nil { + u.Logger.Errorw("failed to setup TOTP - failed to generate recovery code", "error", err) + return nil, errs.Wrap(err) + } + // update user + userID := user.ID.MustGet() + err = u.UserRepository.SetupTOTP( + ctx, + &userID, + key.Secret(), + rc, + key.URL(), + ) + if err != nil { + u.Logger.Errorw("failed to setup TOTP - failed to update user", "error", err) + return nil, errs.Wrap(err) + } + u.AuditLogAuthorized(ae) + + // audit log + return &TOTPValues{ + Secret: key.Secret(), + URL: key.URL(), + RecoveryCode: rc, + }, nil +} + +// SetupCheckTOTP verifies a TOTP setup +func (u *User) SetupCheckTOTP( + ctx context.Context, + session *model.Session, + token *vo.String64, +) error { + ae := NewAuditEvent("User.SetupCheckTOTP", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return err + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get user from session + user := session.User + if user == nil { + u.Logger.Error("user is not loaded in session") + return errors.New("user is not loaded in session") + } + // check if the token is valid + // get the secret + userID := user.ID.MustGet() + secret, _, err := u.UserRepository.GetTOTP( + ctx, + &userID, + ) + if err != nil { + u.Logger.Errorw("failed to verify TOTP - failed to get TOTP", "error", err) + return err + } + // verify the token + u.Logger.Debug("verifying TOTP") + valid := totp.Validate(token.String(), secret) + if !valid { + u.Logger.Debug("failed to verify TOTP - invalid token") + return errs.ErrUserWrongTOTP + } + u.Logger.Debugw("Enabling MFA TOTP for user", "userID", userID) + // enable TOTP + err = u.UserRepository.EnableTOTP( + ctx, + &userID, + ) + if err != nil { + u.Logger.Errorw("failed to verify TOTP - failed to enable TOTP", "error", err) + return err + } + u.AuditLogAuthorized(ae) + + return nil +} + +// IsTOTPEnabled checks if TOTP is enabled +func (u *User) IsTOTPEnabled( + ctx context.Context, + session *model.Session, +) (bool, error) { + ae := NewAuditEvent("User.IsTOTPEnabled", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return false, errs.Wrap(err) + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return false, errs.ErrAuthorizationFailed + } + // get user from session + user := session.User + if user == nil { + u.Logger.Error("user is not loaded in session") + return false, 
errors.New("user is not loaded in session") + } + // check if TOTP is enabled + userID := user.ID.MustGet() + enabled, err := u.UserRepository.IsTOTPEnabled( + ctx, + &userID, + ) + if err != nil { + u.Logger.Errorw("failed to check if TOTP is enabled", "error", err) + return false, errs.Wrap(err) + } + // no audit on read + + return enabled, nil +} + +// IsTOTPEnabledByUserID checks if TOTP is enabled by user ID +// this method had no auth check, use with consideration +func (u *User) IsTOTPEnabledByUserID( + ctx context.Context, + userID *uuid.UUID, +) (bool, error) { + // check if TOTP is enabled + enabled, err := u.UserRepository.IsTOTPEnabled( + ctx, + userID, + ) + if err != nil { + u.Logger.Errorw("failed to check if TOTP is enabled", "error", err) + return false, errs.Wrap(err) + } + return enabled, nil +} + +// DisableTOTP disables TOTP +// without checking if the user privilige, use with consideration +func (u *User) DisableTOTP( + ctx context.Context, + userID *uuid.UUID, +) error { + err := u.UserRepository.RemoveTOTP( + ctx, + userID, + ) + if err != nil { + u.Logger.Errorw("failed to disable TOTP", "error", err) + return err + } + // TODO audit log successful TOTP disable + return nil +} + +// CheckTOTP verifies a TOTP token +func (u *User) CheckTOTP( + ctx context.Context, + userID *uuid.UUID, + token *vo.String64, +) error { + // get the secret + secret, _, err := u.UserRepository.GetTOTP( + ctx, + userID, + ) + if err != nil { + u.Logger.Errorw("failed to verify TOTP - failed to get TOTP", "error", err) + return err + } + // verify the token + valid := totp.Validate(token.String(), secret) + if !valid { + u.Logger.Debug("failed to verify TOTP - invalid token") + return errs.ErrUserWrongTOTP + } + return nil +} + +// AuthenticateUsernameWithPassword tests a username and password is correct +func (u *User) AuthenticateUsernameWithPassword( + ctx context.Context, + username string, + passwd string, + ip string, +) (*model.User, error) { + ae := NewAuditEvent("User.AuthenticateUsernameWithPassword", nil) + ae.IP = ip + ae.Details["username"] = username + // check the entities are valid before doing anything + usernameEntity, err := vo.NewUsername(username) + if err != nil { + u.Logger.Debugw("failed to authenticate - invalid username", "error", err) + return nil, errs.Wrap(err) + } + passwordEntity, err := vo.NewReasonableLengthPassword(passwd) + if err != nil { + u.Logger.Debugw("failed to authenticate - invalid password", "error", err) + return nil, errs.Wrap(err) + } + // retrieve only the password hash to minimize the timing attack window compared to + // pulling the user with all relations + passwordHash, err := u.UserRepository.GetPasswordHashByUsername( + ctx, + usernameEntity, + ) + errIsRecordNotFound := errors.Is(err, gorm.ErrRecordNotFound) + if err != nil && !errIsRecordNotFound { + u.Logger.Errorw("failed to authenticate - failed to get password hash", "error", err) + return nil, errs.Wrap(err) + } + // if user not found - we compare with a fake fakePass hash to mitigate risk of timing attacks + // or if the user is a SSO user + if errIsRecordNotFound || len(passwordHash) == 0 { + _, err = u.PasswordVerifier.Verify(passwd, password.DummyHash) + if err != nil { + u.Logger.Debugw("failed to verify dummy hash", "error", err) + } + return nil, gorm.ErrRecordNotFound + } + // veriy the hash in a constant time manner + verified, err := u.PasswordVerifier.Verify(passwordEntity.String(), passwordHash) + if err != nil { + u.Logger.Errorw("failed to verify password hash", 
"error", err) + return nil, errs.Wrap(err) + } + // if the password is not verifed, log it and return the error + if !verified { + u.AuditLogNotAuthorized(ae) + return nil, errs.ErrUserWrongPasword + } + // on successful login, retrieve the user with relations and send it back + user, err := u.UserRepository.GetByUsername( + ctx, + usernameEntity, + &repository.UserOption{ + WithRole: true, + WithCompany: true, + }, + ) + if err != nil { + u.Logger.Errorw("failed to get user by username after verifying login", "error", err) + return nil, errs.Wrap(err) + } + u.AuditLogAuthorized(ae) + + return user, nil +} + +// CheckPassword checks if a password is correct +func (u *User) CheckPassword( + ctx context.Context, + username *vo.Username, + password *vo.ReasonableLengthPassword, +) (bool, error) { + passwordHash, err := u.UserRepository.GetPasswordHashByUsername( + ctx, + username, + ) + if err != nil { + u.Logger.Errorw("failed to check password - failed to get password hash", "error", err) + return false, errs.Wrap(err) + } + verified, err := u.PasswordVerifier.Verify(password.String(), passwordHash) + if err != nil { + u.Logger.Errorw("failed to check password - failed to verify hash", "error", err) + return false, errs.Wrap(err) + } + return verified, nil +} + +// ChangePassword changes a user's password +func (u *User) ChangePassword( + ctx context.Context, + session *model.Session, + currentPassword *vo.ReasonableLengthPassword, + newPassword *vo.ReasonableLengthPassword, +) error { + ae := NewAuditEvent("User.ChangePassword", session) + // check if the current password is correct + user := session.User + if user == nil { + u.Logger.Error("user is not loaded in session") + return errors.New("user is not loaded in session") + } + username := user.Username.MustGet() + ae.Details["id"] = user.ID.MustGet().String() + ae.Details["username"] = username.String() + passwordHash, err := u.UserRepository.GetPasswordHashByUsername( + ctx, + &username, + ) + if err != nil { + u.Logger.Errorw("failed to change password - failed to get password hash", "error", err) + return err + } + verified, err := u.PasswordVerifier.Verify(currentPassword.String(), passwordHash) + if err != nil { + u.Logger.Errorw("failed to change password - failed to verify hash", "error", err) + return err + } + if !verified { + u.AuditLogNotAuthorized(ae) + return errs.ErrUserWrongPasword + } + // change the password + passwordHash, err = u.PasswordHasher.Hash(newPassword.String()) + if err != nil { + u.Logger.Errorw("failed to change password - failed to hash new password", "error", err) + return err + } + err = u.UserRepository.UpdatePasswordHashByUsername( + ctx, + &username, + passwordHash, + ) + if err != nil { + u.Logger.Errorw("failed to change password - failed to update password hash", "error", err) + return err + } + u.AuditLogAuthorized(ae) + + return nil +} + +// ChangeFullname changes a users fullname +func (u *User) ChangeFullname( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, + newFullname *vo.UserFullname, +) (*vo.UserFullname, error) { + ae := NewAuditEvent("User.ChangeFullname", session) + ae.Details["id"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // change full name + _, err = 
u.UserRepository.GetByID(ctx, userID, &repository.UserOption{}) + if errors.Is(err, gorm.ErrRecordNotFound) { + u.Logger.Debugw("failed to change fullname - user not found", "error", err) + return nil, errs.Wrap(err) + } + err = u.UserRepository.UpdateFullNameByID( + ctx, + userID, + newFullname, + ) + if err != nil { + u.Logger.Errorw("failed to change fullname - failed to update fullname", "error", err) + return nil, errs.Wrap(err) + } + u.AuditLogAuthorized(ae) + + return newFullname, nil +} + +// ChangeEmailAsAdministrator changes a user's email +// changes a users email without validating their email +func (u *User) ChangeEmailAsAdministrator( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, + newEmail *vo.Email, +) (*vo.Email, error) { + ae := NewAuditEvent("User.ChangeEmailAsAdministrator", session) + ae.Details["id"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + current, err := u.UserRepository.GetByID(ctx, userID, &repository.UserOption{}) + if errors.Is(err, gorm.ErrRecordNotFound) { + u.Logger.Debug("failed to change email - user not found") + return nil, errs.Wrap(err) + } + if err != nil { + u.Logger.Debugw("failed to change email - failed to get user by id", "error", err) + return nil, errs.Wrap(err) + } + // update + current.Email.Set(*newEmail) + err = u.UserRepository.UpdateByID( + ctx, + userID, + current, + ) + if err != nil { + u.Logger.Errorw("failed to change email - failed to update email", "error", err) + return nil, errs.Wrap(err) + } + u.AuditLogAuthorized(ae) + + return newEmail, nil +} + +// ChangeUsername changes a user's username +func (u *User) ChangeUsername( + ctx context.Context, + session *model.Session, + userID *uuid.UUID, + newUsername *vo.Username, +) error { + ae := NewAuditEvent("User.ChangeUsername", session) + ae.Details["id"] = userID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + u.LogAuthError(err) + return err + } + if !isAuthorized { + u.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + current, err := u.UserRepository.GetByID(ctx, userID, &repository.UserOption{}) + if err != nil { + u.Logger.Debugw("failed to change username - failed to get user by id", "error", err) + return err + } + current.Username.Set(*newUsername) + err = u.UserRepository.UpdateByID( + ctx, + userID, + current, + ) + if err != nil { + u.Logger.Errorw("failed to change username - failed to update username", "error", err) + return err + } + u.AuditLogAuthorized(ae) + + return nil +} + +// CheckMFARecoveryCode checks if a recovery code is valid +// returns true if the recovery code is valid +func (u *User) CheckMFARecoveryCode( + ctx context.Context, + userID *uuid.UUID, + recoveryCode *vo.String64, +) (bool, error) { + dbRecoveryCodeHash, err := u.UserRepository.GetMFARecoveryCode( + ctx, + userID, + ) + if subtle.ConstantTimeCompare([]byte(recoveryCode.String()), []byte(dbRecoveryCodeHash)) != 1 { + u.Logger.Info("invalid recovery code") + return false, errs.ErrUserWrongRecoveryCode + } + if err != nil { + u.Logger.Errorw("failed to get recovery code", "error", err) + return false, errs.Wrap(err) + } + return true, nil 
+} diff --git a/backend/service/utils.go b/backend/service/utils.go new file mode 100644 index 0000000..55e9a58 --- /dev/null +++ b/backend/service/utils.go @@ -0,0 +1,93 @@ +package service + +import ( + "fmt" + + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "go.uber.org/zap" +) + +// Common holds commonly used service utils +type Common struct { + Logger *zap.SugaredLogger +} + +func (c *Common) LogAuthError(err error) { + c.Logger.Errorw("failed to check permission", "error", err) +} + +type AuditEvent struct { + Name string // ex. User.Create + IP string // ip of the user performing the action + UserID string // user performing the action + Authorized bool + Details map[string]interface{} +} + +func NewAuditEvent(name string, session *model.Session) *AuditEvent { + userID := "" + clientIP := "" + if session != nil { + if usr := session.User; usr != nil { + userID = usr.ID.MustGet().String() + } + clientIP = session.IP + } + return &AuditEvent{ + Name: name, + UserID: userID, + IP: clientIP, + Details: map[string]interface{}{}, + } +} + +func (c *Common) auditLog(ae *AuditEvent) { + c.Logger.Infow("audit", ae.LogFields()...) +} + +func (c *Common) AuditLogAuthorized(e *AuditEvent) { + e.Authorized = true + c.auditLog(e) +} + +func (c *Common) AuditLogNotAuthorized(e *AuditEvent) { + e.Authorized = false + c.auditLog(e) +} + +func isLoaded( + session *model.Session, +) (*model.User, *model.Role, error) { + user := session.User + if user == nil { + return nil, nil, fmt.Errorf("user is not loaded but required") + } + role := user.Role + if role == nil { + return nil, nil, fmt.Errorf("role is not loaded but required") + } + return user, role, nil +} + +// IsAuthorized checks if the session is authorized to perform the permission +func IsAuthorized( + session *model.Session, + permission string, +) (bool, error) { + _, role, err := isLoaded(session) + if err != nil { + return false, errs.Wrap(err) + } + return role.IsAuthorized(permission), nil +} + +func (ae *AuditEvent) LogFields() []interface{} { + return []interface{}{ + "name", ae.Name, + "ip", ae.IP, + "userId", ae.UserID, + "authorized", ae.Authorized, + "details", ae.Details, + } +} diff --git a/backend/service/version.go b/backend/service/version.go new file mode 100644 index 0000000..70f0281 --- /dev/null +++ b/backend/service/version.go @@ -0,0 +1,36 @@ +package service + +import ( + "context" + + "github.com/go-errors/errors" + + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/version" +) + +// Version is a service for application service +type Version struct { + Common +} + +// Get gets the application service +func (o *Version) Get( + ctx context.Context, + session *model.Session, +) (string, error) { + ae := NewAuditEvent("Version.Get", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + o.LogAuthError(err) + return "", errs.Wrap(err) + } + if !isAuthorized { + o.AuditLogNotAuthorized(ae) + return "", errs.ErrAuthorizationFailed + } + return version.Get(), nil +} diff --git a/backend/service/webhook.go b/backend/service/webhook.go new file mode 100644 index 0000000..a172baa --- /dev/null +++ b/backend/service/webhook.go @@ -0,0 +1,394 @@ +package service + +import ( + "bytes" + "context" + "crypto/hmac" + "crypto/sha256" + 
"encoding/hex" + "encoding/json" + "io" + "net/http" + "time" + + "github.com/go-errors/errors" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" +) + +type Webhook struct { + Common + CampaignRepository *repository.Campaign + WebhookRepository *repository.Webhook +} + +func (w *Webhook) Create( + ctx context.Context, + session *model.Session, + webhook *model.Webhook, +) (*uuid.UUID, error) { + ae := NewAuditEvent("Webhook.Create", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + w.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + w.AuditLogNotAuthorized(ae) + return nil, errors.New("unauthorized") + } + // validate data + if err := webhook.Validate(); err != nil { + return nil, errs.Wrap(err) + } + // check uniqueness + var companyID *uuid.UUID + if cid, err := webhook.CompanyID.Get(); err == nil { + companyID = &cid + } + name := webhook.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + w.WebhookRepository.DB, + "webhooks", + name.String(), + companyID, + nil, + ) + if err != nil { + w.Logger.Errorw("failed to check webhook uniqueness", "error", err) + return nil, errs.Wrap(err) + } + if !isOK { + w.Logger.Debugw("webhook name is already taken", "name", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + // insert + id, err := w.WebhookRepository.Insert(ctx, webhook) + if err != nil { + w.Logger.Errorw("failed to insert webhook", "error", err) + return nil, errs.Wrap(err) + } + ae.Details["id"] = id.String() + w.AuditLogAuthorized(ae) + + return id, nil +} + +// GetAll gets all webhooks +func (w *Webhook) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.WebhookOption, +) (*model.Result[model.Webhook], error) { + result := model.NewEmptyResult[model.Webhook]() + ae := NewAuditEvent("Webhook.GetAll", session) + if companyID != nil { + ae.Details["companyId"] = companyID.String() + } + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + w.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + w.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get + result, err = w.WebhookRepository.GetAll(ctx, companyID, options) + if err != nil { + w.Logger.Errorw("failed to get webhooks", "error", err) + return result, errs.Wrap(err) + } + w.AuditLogAuthorized(ae) + + return result, nil +} + +// GetByID gets a webhook by id +func (w *Webhook) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) (*model.Webhook, error) { + ae := NewAuditEvent("Webhook.GetByID", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + w.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + w.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get + out, err := w.WebhookRepository.GetByID(ctx, id) + if err != nil { + w.Logger.Errorw("failed to get webhook", "error", err) + return out, errs.Wrap(err) + } + // no audit on read + + return out, nil +} + +// GetByCompanyID gets a webhooks by 
company id
+func (w *Webhook) GetByCompanyID(
+	ctx context.Context,
+	session *model.Session,
+	companyID *uuid.UUID,
+) ([]*model.Webhook, error) {
+	ae := NewAuditEvent("Webhook.GetByCompanyID", session)
+	if companyID != nil {
+		ae.Details["companyId"] = companyID.String()
+	}
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil {
+		w.LogAuthError(err)
+		return nil, errs.Wrap(err)
+	}
+	if !isAuthorized {
+		w.AuditLogNotAuthorized(ae)
+		return nil, errs.ErrAuthorizationFailed
+	}
+	// get
+	models, err := w.WebhookRepository.GetAllByCompanyID(ctx, companyID, &repository.WebhookOption{})
+	if err != nil {
+		w.Logger.Errorw("failed to get webhooks", "error", err)
+		return models, errs.Wrap(err)
+	}
+	// no audit on read
+
+	return models, nil
+}
+
+// Update updates a webhook
+func (w *Webhook) Update(
+	ctx context.Context,
+	session *model.Session,
+	id *uuid.UUID,
+	webhook *model.Webhook,
+) error {
+	ae := NewAuditEvent("Webhook.Update", session)
+	ae.Details["id"] = id.String()
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil {
+		w.LogAuthError(err)
+		return err
+	}
+	if !isAuthorized {
+		w.AuditLogNotAuthorized(ae)
+		return errors.New("unauthorized")
+	}
+	// get current
+	current, err := w.WebhookRepository.GetByID(ctx, id)
+	if err != nil {
+		w.Logger.Errorw("failed to get webhook", "error", err)
+		return err
+	}
+	// update values
+	if v, err := webhook.Name.Get(); err == nil {
+		// check uniqueness
+		var companyID *uuid.UUID
+		if cid, err := webhook.CompanyID.Get(); err == nil {
+			companyID = &cid
+		}
+
+		isOK, err := repository.CheckNameIsUnique(
+			ctx,
+			w.WebhookRepository.DB,
+			"webhooks",
+			v.String(),
+			companyID,
+			id,
+		)
+		if err != nil {
+			w.Logger.Errorw("failed to check webhook uniqueness", "error", err)
+			return err
+		}
+		if !isOK {
+			w.Logger.Debugw("webhook name is already taken", "name", v.String())
+			return validate.WrapErrorWithField(errors.New("is not unique"), "name")
+		}
+		current.Name.Set(v)
+	}
+	if v, err := webhook.URL.Get(); err == nil {
+		current.URL.Set(v)
+	}
+	if v, err := webhook.Secret.Get(); err == nil {
+		current.Secret.Set(v)
+	}
+	// update with the merged current record so the validated values are persisted
+	err = w.WebhookRepository.UpdateByID(ctx, id, current)
+	if err != nil {
+		w.Logger.Errorw("failed to update webhook", "error", err)
+		return err
+	}
+	w.AuditLogAuthorized(ae)
+
+	return nil
+}
+
+// DeleteByID deletes a webhook
+func (w *Webhook) DeleteByID(
+	ctx context.Context,
+	session *model.Session,
+	id *uuid.UUID,
+) error {
+	ae := NewAuditEvent("Webhook.DeleteByID", session)
+	// check permissions
+	isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL)
+	if err != nil {
+		w.LogAuthError(err)
+		return err
+	}
+	if !isAuthorized {
+		w.AuditLogNotAuthorized(ae)
+		return errors.New("unauthorized")
+	}
+	// get campaigns affected so we can remove the webhook from them
+	affectedCampaigns, err := w.CampaignRepository.GetByWebhookID(
+		ctx,
+		id,
+	)
+	if err != nil {
+		w.Logger.Errorw("failed to get campaigns affected by removing webhook", "error", err)
+		return err
+	}
+	cids := []*uuid.UUID{}
+	for _, campaign := range affectedCampaigns {
+		cid := campaign.ID.MustGet()
+		cids = append(cids, &cid)
+	}
+	err = w.CampaignRepository.RemoveWebhookByCampaignIDs(
+		ctx,
+		cids,
+	)
+	if err != nil {
+		w.Logger.Errorw("failed to remove webhook from campaigns", "error", err)
+		return err
+	}
+	// delete
+	err = w.WebhookRepository.DeleteByID(ctx, id)
+	if err 
!= nil { + w.Logger.Errorw("failed to delete webhook", "error", err) + return err + } + w.AuditLogAuthorized(ae) + + return nil +} + +// SendTest sends a test webhook +func (w *Webhook) SendTest( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) (map[string]interface{}, error) { + ae := NewAuditEvent("Webhook.SendTest", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil { + w.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + w.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + w.Logger.Debugw("sending test webhook", "error", id) + // send + webhook, err := w.WebhookRepository.GetByID(ctx, id) + if err != nil { + w.Logger.Errorw("failed to get webhook", "error", err) + return nil, errs.Wrap(err) + } + now := time.Now() + request := WebhookRequest{ + Time: &now, + CampaignName: "Test Campaign", + Email: "test@webhook.test", + Event: "test", + } + data, err := w.Send(ctx, webhook, &request) + if err != nil { + w.Logger.Errorw("failed to send webhook", "error", err) + return nil, errs.Wrap(err) + } + w.AuditLogAuthorized(ae) + + return data, nil +} + +// Send sends a webhook request +func (w *Webhook) Send( + ctx context.Context, + webhook *model.Webhook, + request *WebhookRequest, +) (map[string]interface{}, error) { + reqCtx, reqCancel := context.WithTimeout(context.Background(), 3*time.Second) + defer func() { + reqCancel() + }() + requestJSON, err := json.Marshal(request) + if err != nil { + return nil, errs.Wrap(err) + } + requestJSONBuffer := bytes.NewBuffer(requestJSON) + url := webhook.URL.MustGet() + req, err := http.NewRequestWithContext(reqCtx, "POST", url.String(), requestJSONBuffer) + if err != nil { + return nil, errs.Wrap(err) + } + req.Header.Set("Content-Type", "application/json") + // hmac sign the request if secret is set + var signature = "UNSIGNED" + if secret, err := webhook.Secret.Get(); err == nil { + hasher := hmac.New(sha256.New, []byte(secret.String())) + _, err := hasher.Write(requestJSON) + if err != nil { + return nil, errs.Wrap(err) + } + signature = hex.EncodeToString(hasher.Sum(nil)) + } + req.Header.Set("X-SIGNATURE", signature) + req.Header.Add("User-Agent", "Go-http-client") + response, err := http.DefaultClient.Do(req) + if err != nil { + return nil, errs.Wrap(err) + } + data := map[string]interface{}{ + "code": response.StatusCode, + "status": response.Status, + } + // parse respone body + body, err := io.ReadAll(response.Body) + if err != nil { + w.Logger.Errorw("failed to read response body", "error", err) + return nil, errs.Wrap(err) + } + defer response.Body.Close() + data["body"] = string(body) + + return data, nil +} + +type WebhookRequest struct { + Time *time.Time `json:"time"` + CampaignName string `json:"campaignName"` + Email string `json:"email"` + Event string `json:"event"` +} diff --git a/backend/sso/entreID.go b/backend/sso/entreID.go new file mode 100644 index 0000000..662dd17 --- /dev/null +++ b/backend/sso/entreID.go @@ -0,0 +1,36 @@ +package sso + +import ( + "fmt" + + "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential" + "github.com/go-errors/errors" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" +) + +func NewEntreIDClient(sso *model.SSOOption) (*confidential.Client, error) { + if !sso.Enabled { + return nil, errs.Wrap(errs.ErrSSODisabled) + } + clientID := sso.ClientID.String() + tenantID := 
sso.TenantID.String() + clientSecret := sso.ClientSecret.String() + // Create credential from client secret + cred, err := confidential.NewCredFromSecret(clientSecret) + if err != nil { + return nil, errs.Wrap(errors.Errorf("failed setup ENTRE ID credentials: %s", err)) + } + url := fmt.Sprintf("https://login.microsoftonline.com/%s", tenantID) + + // Create the client + client, err := confidential.New( + url, + clientID, + cred, + ) + if err != nil { + return nil, errs.Wrap(errors.Errorf("failed setup ENTRE ID client: %w", err)) + } + return &client, nil +} diff --git a/backend/task/runner.go b/backend/task/runner.go new file mode 100644 index 0000000..e6fedbc --- /dev/null +++ b/backend/task/runner.go @@ -0,0 +1,201 @@ +package task + +import ( + "context" + "fmt" + "runtime/debug" + "sync" + "time" + + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/service" + "go.uber.org/zap" +) + +// MAX_PROCESSING_TICK_TIME is the maximum time a full round of processing may take. +const MAX_PROCESSING_TICK_TIME = 10 * time.Minute +const TASK_INTERVAL = 10 * time.Second +const SYSTEM_TASK_INTERVAL = 1 * time.Hour + +// Daemon is for running tasks in the background +// ex: sending emails etc.. +type Runner struct { + CampaignService *service.Campaign + UpdateService *service.Update + IsRunning bool + Logger *zap.SugaredLogger +} + +// Run starts the rask runner +// TODO implement a abort signal so things can be handled gracefully +// func (d *daemon) Run(abortSignal chan struct{}) { +func (d *Runner) Run( + ctx context.Context, + session *model.Session, +) { + defer func() { + if r := recover(); r != nil { + stack := debug.Stack() + d.Logger.Errorw("task runner panicked", "error", r, "stack", string(stack)) + d.Logger.Info("Restarting inline runner daemon in 5 seconds") + time.Sleep(5 * time.Second) + // Restart in a new goroutine to avoid recursive stack growth + go d.Run(ctx, session) + } + }() + // + d.Logger.Debug("task runner started") + // on the start of the next minute create a event loop that runs every minute + // this is to ensure that the daemon runs every minute + //lastFinishedAt := time.Now() + for { + now := time.Now() + + select { + case <-ctx.Done(): + d.Logger.Debugf("Task runner stopping due to signal") + return + default: + /* + if lastFinishedAt.Add(time.Minute).After(now) { + d.Logger.Warn("Last task took longer than a minute (processing tick) to complete") + } + d.Logger.Debugw("Task processing tick took", "error ,time.Since(now).Milliseconds()) + */ + // sleep until the next minute change (ex 12:00:00 -> 12:01:00 and not 12:00:31 -> 12:01:31) + // + nextTick := now.Truncate(TASK_INTERVAL).Add(TASK_INTERVAL) + + time.Sleep(time.Until(nextTick)) + // time.Sleep(time.Until(now.Truncate(time.Minute).Add(time.Minute))) + d.Process( + ctx, + session, + ) + } + } +} + +func (d *Runner) RunSystemTasks( + ctx context.Context, + session *model.Session, + wg *sync.WaitGroup, +) { + // catch panics + defer func() { + if r := recover(); r != nil { + stack := debug.Stack() + d.Logger.Errorw("task runner panicked", "error", r, "stack", string(stack)) + d.Logger.Info("Restarting inline system runner daemon in 5 seconds") + time.Sleep(5 * time.Second) + // Restart in a new goroutine to avoid recursive stack growth + go d.RunSystemTasks(ctx, session, nil) // Pass nil for wg to avoid double Done() + } + }() + initialRunCompleted := false + d.Logger.Debug("system task runner started") + for { + now := time.Now() + 
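+		// capture the tick start before processing so the sleep below waits relative to when the
+		// round began, keeping the system task interval roughly steady regardless of processing time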
// first task is done immediately + d.ProcessSystemTasks( + ctx, + session, + ) + if !initialRunCompleted { + initialRunCompleted = true + wg.Done() + } + + select { + case <-ctx.Done(): + d.Logger.Debugf("System Task runner stopping due to signal") + return + default: + // time is not truncated to on the start of the next hour to avoid + // all servers calling back at the same moment + time.Sleep(time.Until(now.Add(SYSTEM_TASK_INTERVAL))) + } + } +} + +// runTask runs a task +func (d *Runner) runTask( + name string, + fn func() error, +) { + d.Logger.Debugw("task runner started", "name", name) + now := time.Now() + err := errs.Wrap(fn()) + if err != nil { + d.Logger.Errorw("task runner failed", "name", name, "error", err) + } + d.Logger.Debugw( + "task runner completed", + "name", name, + "duration", time.Since(now), + ) +} + +// Process processes the tasks +func (d *Runner) Process( + ctx context.Context, + session *model.Session, +) { + ctx, cancel := context.WithTimeoutCause( + ctx, + MAX_PROCESSING_TICK_TIME, + fmt.Errorf("Processing tasks took over %f minutes", MAX_PROCESSING_TICK_TIME.Minutes()), + ) + defer cancel() + // update campaigns that are closed + d.runTask("close campaigns", func() error { + return d.CampaignService.HandleCloseCampaigns( + ctx, + session, + ) + }) + // anonymize campaigns that are ready to be anonymized + d.runTask("anonymize campaigns", func() error { + return d.CampaignService.HandleAnonymizeCampaigns( + ctx, + session, + ) + }) + d.Logger.Debug("task runner started processing") + // send the next batch of messagess + d.runTask("send messages", func() error { + err := d.CampaignService.SendNextBatch( + ctx, + session, + ) + return errs.Wrap(err) + }) + d.Logger.Debug("task runner ended processing") +} + +// Process system tasks +func (d *Runner) ProcessSystemTasks( + ctx context.Context, + session *model.Session, +) { + ctx, cancel := context.WithTimeoutCause( + ctx, + MAX_PROCESSING_TICK_TIME, + errs.Wrap( + fmt.Errorf( + "Processing tasks took over %f minutes", MAX_PROCESSING_TICK_TIME.Minutes(), + ), + ), + ) + defer cancel() + // check for updates + d.runTask("system - check updates", func() error { + if d.UpdateService == nil { + d.Logger.Warn("UpdateService is nil, skipping update check") + return nil + } + _, _, err := d.UpdateService.CheckForUpdate(ctx, session) + return err + }) +} diff --git a/backend/test/test.csv b/backend/test/test.csv new file mode 100644 index 0000000..d80d8ee --- /dev/null +++ b/backend/test/test.csv @@ -0,0 +1,15 @@ +email,phone,extraIdentifier,name,position,department,city,country,misc +a@import-test.test,1234567890,extraIdentifier1,Name1,Position1,Department1,City1,Country1,Misc1 +b@import-test.test,1234567891,extraIdentifier2,Name2,Position2,Department2,City2,Country2,Misc2 +c@import-test.test,1234567892,extraIdentifier3,Name3,Position3,Department3,City3,Country3,Misc3 +d@import-test.test,1234567893,extraIdentifier4,Name4,Position4,Department4,City4,Country4,Misc4 +e@import-test.test,1234567894,extraIdentifier5,Name5,Position5,Department5,City5,Country5,Misc5 +d@import-test.test,1234567896,extraIdentifier6,Name6,Position6,Department6,City6,Country6,Misc6 +f@import-test.test,1234567897,extraIdentifier7,Name7,Position7,Department7,City7,Country7,Misc7 +g@import-test.test,1234567898,extraIdentifier8,Name8,Position8,Department8,City8,Country8,Misc8 +h@import-test.test,1234567899,extraIdentifier9,Name9,Position9,Department9,City9,Country9,Misc9 
+i@import-test.test,1234567890,extraIdentifier10,Name10,Position10,Department10,City10,Country10,Misc10 +j@import-test.test,1234567891,extraIdentifier11,Name11,Position11,Department11,City11,Country11,Misc11 +k@import-test.test,1234567892,extraIdentifier12,Name12,Position12,Department12,City12,Country12,Misc12 +l@import-test.test,1234567893,extraIdentifier13,Name13,Position13,Department13,City13,Country13,Misc13 +m@import-test.test,1234567894,extraIdentifier14,Name14,Position14,Department14,City14,Country14,Misc14 diff --git a/backend/test/test2.csv b/backend/test/test2.csv new file mode 100644 index 0000000..a6b71a3 --- /dev/null +++ b/backend/test/test2.csv @@ -0,0 +1,5 @@ +email,phone,extraIdentifier,name,position,department,city,country,misc +a@import-test.test,1234567890,extraIdentifier1,NewName,Position1,Department1,City1,Country1,Misc1 +b@import-test.test,1234567891,extraIdentifier2,NewName2,PositionNew2,DepartmentNew2,CityNew2,Country2,Misc2 +n@import-test.test,1234567892,extraIdentifierX3,NameX3,PositionX1,DepartmentX3,CityX3,CountryX3,MiscX3 +o@import-test.test,1234567893,extraIdentifierX4,NameX4,PositionX4,DepartmentX4,CityX4,CountryX4,MiscX4 diff --git a/backend/test/test3.csv b/backend/test/test3.csv new file mode 100644 index 0000000..eaef7f4 --- /dev/null +++ b/backend/test/test3.csv @@ -0,0 +1,2 @@ +email,name +a@import-test.test,lol diff --git a/backend/testfiles/attachment.txt b/backend/testfiles/attachment.txt new file mode 100644 index 0000000..9562308 --- /dev/null +++ b/backend/testfiles/attachment.txt @@ -0,0 +1 @@ +Hi {{.FirstName}} Welcome to The Phishing Club! We are excited to have you here. Click here to get started. diff --git a/backend/testfiles/license.json b/backend/testfiles/license.json new file mode 100644 index 0000000..846265c --- /dev/null +++ b/backend/testfiles/license.json @@ -0,0 +1,9 @@ +{ + "version": "v1", + "company": "Phishing Club", + "key": "", + "validUntil": "2025-12-23T15:30:22.635975262Z", + "isValid": true, + "signature": "lr9fmt3LqacnLzmQ3TAg5P8lQJ0R9OwNjKGa1s6nLE0=", + "offlineActivated": true +} diff --git a/backend/testfiles/recipients-2.csv b/backend/testfiles/recipients-2.csv new file mode 100644 index 0000000..63b405b --- /dev/null +++ b/backend/testfiles/recipients-2.csv @@ -0,0 +1,51 @@ +Email,Phone,ExtraIdentifier,FirstName,LastName,Position,Department,City,Country,Misc +"alexander.schmidt@gogetters.test","+49-555-1191","EMP001","Alexander","Schmidt","Chief Technology Officer","Executive","Berlin","Germany","Digital Transformation" +"priya.sharma@gogetters.test","+91-555-2847","EMP002","Priya","Sharma","Machine Learning Engineer","AI","Bangalore","India","Deep Learning Expert" +"liam.thompson@gogetters.test","+44-555-8273","EMP003","Liam","Thompson","Cloud Architect","Infrastructure","Manchester","UK","Multi-cloud Strategy" +"sofia.andersson@gogetters.test","+46-555-9182","EMP004","Sofia","Andersson","Innovation Director","R&D","Stockholm","Sweden","Patent Portfolio" +"miguel.torres@gogetters.test","+34-555-2938","EMP005","Miguel","Torres","Senior Developer","Engineering","Madrid","Spain","Blockchain Lead" +"jun.wang@gogetters.test","+86-555-3847","EMP006","Jun","Wang","Data Architect","Data","Shanghai","China","Big Data Systems" +"sarah.miller@gogetters.test","+1-555-9273","EMP007","Sarah","Miller","Product Strategy","Product","San Francisco","USA","Product Innovation" +"ahmed.hassan@gogetters.test","+20-555-2837","EMP008","Ahmed","Hassan","Platform Engineer","Engineering","Dubai","UAE","Distributed Systems" 
+"emma.clarke@gogetters.test","+61-555-2938","EMP009","Emma","Clarke","Head of Design","Design","Sydney","Australia","Design Leadership" +"antoine.lefebvre@gogetters.test","+33-555-8472","EMP010","Antoine","Lefebvre","Tech Lead","Engineering","Paris","France","System Architecture" +"yuki.tanaka@gogetters.test","+81-555-9283","EMP011","Yuki","Tanaka","Robotics Engineer","Innovation","Tokyo","Japan","Automation Systems" +"maria.gonzalez@gogetters.test","+52-555-7362","EMP012","Maria","Gonzalez","Quality Director","QA","Mexico City","Mexico","Quality Standards" +"oliver.hansen@gogetters.test","+45-555-2837","EMP013","Oliver","Hansen","DevSecOps Lead","Security","Copenhagen","Denmark","Security Integration" +"ana.silva@gogetters.test","+351-555-9273","EMP014","Ana","Silva","Mobile Lead","Development","Lisbon","Portugal","Cross-platform Apps" +"dmitry.kuznetsov@gogetters.test","+7-555-8273","EMP015","Dmitry","Kuznetsov","Backend Architect","Engineering","Moscow","Russia","Scaling Expert" +"chiara.romano@gogetters.test","+39-555-2938","EMP016","Chiara","Romano","UX Research Lead","Design","Milan","Italy","User Behavior" +"sean.o'brien@gogetters.test","+353-555-8472","EMP017","Sean","O'Brien","Sales Engineer","Sales","Dublin","Ireland","Technical Sales" +"marta.kovac@gogetters.test","+385-555-9283","EMP018","Marta","Kovac","Digital Marketing","Marketing","Zagreb","Croatia","Growth Hacking" +"thomas.wagner@gogetters.test","+43-555-7362","EMP019","Thomas","Wagner","Systems Engineer","Operations","Vienna","Austria","Process Automation" +"carmen.ruiz@gogetters.test","+56-555-2837","EMP020","Carmen","Ruiz","Integration Specialist","Engineering","Santiago","Chile","API Architecture" +"viktor.petrov@gogetters.test","+359-555-9273","EMP021","Viktor","Petrov","Blockchain Developer","Engineering","Sofia","Bulgaria","Smart Contracts" +"lisa.berg@gogetters.test","+47-555-8273","EMP022","Lisa","Berg","AI Research","Research","Oslo","Norway","Neural Networks" +"marco.bianchi@gogetters.test","+41-555-2938","EMP023","Marco","Bianchi","Technical Director","Engineering","Zurich","Switzerland","Tech Strategy" +"ravi.kumar@gogetters.test","+91-555-8472","EMP024","Ravi","Kumar","Performance Engineer","Engineering","Hyderabad","India","Optimization" +"wei.chen@gogetters.test","+86-555-9283","EMP025","Wei","Chen","ML Operations","AI","Beijing","China","ML Infrastructure" +"min-ji.kim@gogetters.test","+82-555-7362","EMP026","Min-ji","Kim","Platform Lead","Platform","Seoul","South Korea","Platform Strategy" +"lucas.santos@gogetters.test","+55-555-2837","EMP027","Lucas","Santos","Security Architect","Security","São Paulo","Brazil","Zero Trust" +"emma.virtanen@gogetters.test","+358-555-9273","EMP028","Emma","Virtanen","IoT Specialist","IoT","Helsinki","Finland","Connected Devices" +"james.wilson@gogetters.test","+1-555-8273","EMP029","James","Wilson","Cloud Security","Security","Toronto","Canada","SecOps Lead" +"elena.popov@gogetters.test","+7-555-2938","EMP030","Elena","Popov","Data Science Lead","Data","St Petersburg","Russia","Predictive Models" +"michael.weber@gogetters.test","+49-555-8472","EMP031","Michael","Weber","Automation Lead","Engineering","Munich","Germany","RPA Expert" +"sophie.martin@gogetters.test","+33-555-9283","EMP032","Sophie","Martin","UI Architecture","Frontend","Lyon","France","Micro-frontends" +"takeshi.yamamoto@gogetters.test","+81-555-7362","EMP033","Takeshi","Yamamoto","Mobile Architect","Mobile","Osaka","Japan","Native Apps" 
+"laura.schmidt@gogetters.test","+49-555-2837","EMP034","Laura","Schmidt","Knowledge Manager","Operations","Hamburg","Germany","Documentation" +"pedro.silva@gogetters.test","+351-555-9273","EMP035","Pedro","Silva","Release Manager","DevOps","Porto","Portugal","CI/CD Expert" +"eva.novotna@gogetters.test","+420-555-8273","EMP036","Eva","Novotna","Agile Coach","Management","Prague","Czech Republic","Scrum Master" +"alessandro.conti@gogetters.test","+39-555-2938","EMP037","Alessandro","Conti","Network Architect","Infrastructure","Rome","Italy","SDN Expert" +"zoe.taylor@gogetters.test","+64-555-8472","EMP038","Zoe","Taylor","Solutions Lead","Solutions","Wellington","New Zealand","Enterprise Solutions" +"adam.kowalski@gogetters.test","+48-555-9283","EMP039","Adam","Kowalski","Integration Lead","Integration","Warsaw","Poland","EAI Specialist" +"fatima.al-sayed@gogetters.test","+973-555-7362","EMP040","Fatima","Al-Sayed","Innovation Lead","Innovation","Manama","Bahrain","Digital Innovation" +"david.anderson@gogetters.test","+1-555-2837","EMP041","David","Anderson","API Architect","Architecture","Seattle","USA","API Strategy" +"nina.larsson@gogetters.test","+46-555-9273","EMP042","Nina","Larsson","Testing Lead","QA","Gothenburg","Sweden","Test Automation" +"gabriel.santos@gogetters.test","+55-555-8273","EMP043","Gabriel","Santos","Cloud Native","Cloud","Rio de Janeiro","Brazil","Kubernetes Expert" +"anna.kowalczyk@gogetters.test","+48-555-2938","EMP044","Anna","Kowalczyk","Frontend Lead","Frontend","Krakow","Poland","Web Performance" +"mohammed.ali@gogetters.test","+971-555-8472","EMP045","Mohammed","Ali","Security Ops","Security","Abu Dhabi","UAE","SOC Manager" +"ines.rodriguez@gogetters.test","+34-555-9283","EMP046","Ines","Rodriguez","Data Engineer","Data","Barcelona","Spain","Data Pipeline" +"kevin.zhang@gogetters.test","+86-555-7362","EMP047","Kevin","Zhang","Site Reliability","SRE","Shenzhen","China","Platform Reliability" +"julia.eriksson@gogetters.test","+46-555-2837","EMP048","Julia","Eriksson","UX Lead","Design","Malmo","Sweden","Design Systems" +"thomas.dubois@gogetters.test","+32-555-9273","EMP049","Thomas","Dubois","Tech Support Lead","Support","Brussels","Belgium","Enterprise Support" +"natalia.ivanova@gogetters.test","+7-555-8273","EMP050","Natalia","Ivanova","Software Architect","Architecture","Kazan","Russia","System Design" diff --git a/backend/testfiles/recipients.csv b/backend/testfiles/recipients.csv new file mode 100644 index 0000000..90fbee5 --- /dev/null +++ b/backend/testfiles/recipients.csv @@ -0,0 +1,51 @@ +Email,Phone,ExtraIdentifier,FirstName,LastName,Position,Department,City,Country,Misc +"marcus.jenkins@importers.test","+1-555-0191","EMP001","Marcus","Jenkins","Solutions Architect","Engineering","Portland","USA","Kubernetes Expert" +"elena.popov@importers.test","+7-916-555-0847","EMP002","Elena","Popov","Data Scientist","Analytics","Moscow","Russia","ML Specialist" +"raj.patel@importers.test","+91-555-8273","EMP003","Raj","Patel","DevOps Engineer","Operations","Mumbai","India","Docker Certified" +"hannah.berg@importers.test","+47-555-9182","EMP004","Hannah","Berg","Product Owner","Product","Oslo","Norway","Agile Coach" +"luis.santos@importers.test","+351-555-2938","EMP005","Luis","Santos","Frontend Developer","Engineering","Lisbon","Portugal","React Native Lead" +"mei.wong@importers.test","+65-555-3847","EMP006","Mei","Wong","Business Analyst","Business","Singapore","Singapore","Finance Systems" 
+"viktor.kovac@importers.test","+385-555-9273","EMP007","Viktor","Kovac","Security Engineer","InfoSec","Zagreb","Croatia","Penetration Tester" +"aisha.khan@importers.test","+971-555-2837","EMP008","Aisha","Khan","UX Researcher","Design","Dubai","UAE","User Research Lead" +"thomas.mueller@importers.test","+49-555-2938","EMP009","Thomas","Mueller","Backend Engineer","Engineering","Frankfurt","Germany","Java Expert" +"sophie.dupont@importers.test","+33-555-8472","EMP010","Sophie","Dupont","Marketing Specialist","Marketing","Lyon","France","Digital Campaign Manager" +"lars.andersen@importers.test","+45-555-9283","EMP011","Lars","Andersen","System Administrator","IT","Copenhagen","Denmark","Network Infrastructure" +"carmen.silva@importers.test","+55-555-7362","EMP012","Carmen","Silva","Quality Engineer","QA","Sao Paulo","Brazil","Test Automation" +"hiroshi.sato@importers.test","+81-555-2837","EMP013","Hiroshi","Sato","Mobile Developer","Engineering","Osaka","Japan","iOS Specialist" +"eva.kowalska@importers.test","+48-555-9273","EMP014","Eva","Kowalska","HR Manager","Human Resources","Krakow","Poland","Talent Development" +"mohammed.ahmed@importers.test","+20-555-8273","EMP015","Mohammed","Ahmed","Full Stack Developer","Engineering","Cairo","Egypt","MERN Stack" +"isabella.rossi@importers.test","+39-555-2938","EMP016","Isabella","Rossi","Project Manager","PMO","Milan","Italy","Prince2 Certified" +"sean.murphy@importers.test","+353-555-8472","EMP017","Sean","Murphy","Sales Director","Sales","Dublin","Ireland","Enterprise Accounts" +"nina.kovac@importers.test","+381-555-9283","EMP018","Nina","Kovac","Content Strategist","Marketing","Belgrade","Serbia","SEO Expert" +"felix.wagner@importers.test","+43-555-7362","EMP019","Felix","Wagner","Cloud Engineer","Infrastructure","Vienna","Austria","Azure Architect" +"ana.santos@importers.test","+34-555-2837","EMP020","Ana","Santos","Data Engineer","Data","Barcelona","Spain","ETL Pipeline Expert" +"dmitri.volkov@importers.test","+7-555-9273","EMP021","Dmitri","Volkov","Backend Developer","Engineering","St Petersburg","Russia","Python Lead" +"julia.berg@importers.test","+46-555-8273","EMP022","Julia","Berg","Product Designer","Design","Stockholm","Sweden","Design Systems" +"marco.rossi@importers.test","+39-555-2938","EMP023","Marco","Rossi","Solutions Consultant","Sales","Rome","Italy","Technical Sales" +"leah.cohen@importers.test","+972-555-8472","EMP024","Leah","Cohen","Security Analyst","Security","Tel Aviv","Israel","Cyber Security" +"chen.liu@importers.test","+86-555-9283","EMP025","Chen","Liu","Operations Manager","Operations","Beijing","China","Process Optimization" +"kim.park@importers.test","+82-555-7362","EMP026","Kim","Park","Frontend Engineer","Engineering","Seoul","South Korea","Angular Expert" +"andre.silva@importers.test","+55-555-2837","EMP027","Andre","Silva","Tech Lead","Engineering","Rio de Janeiro","Brazil","Architecture Design" +"emma.virtanen@importers.test","+358-555-9273","EMP028","Emma","Virtanen","UX Designer","Design","Helsinki","Finland","Mobile UX" +"oscar.andersson@importers.test","+46-555-8273","EMP029","Oscar","Andersson","DevOps Lead","Operations","Gothenburg","Sweden","CI/CD Pipeline" +"sofia.lopez@importers.test","+56-555-2938","EMP030","Sofia","Lopez","Business Developer","Sales","Santiago","Chile","Market Expansion" +"adrian.popescu@importers.test","+40-555-8472","EMP031","Adrian","Popescu","QA Engineer","QA","Bucharest","Romania","Integration Testing" 
+"yuki.yamamoto@importers.test","+81-555-9283","EMP032","Yuki","Yamamoto","Mobile Developer","Engineering","Tokyo","Japan","Android Expert" +"maria.silva@importers.test","+351-555-7362","EMP033","Maria","Silva","Scrum Master","Project Management","Porto","Portugal","Agile Methods" +"erik.nielsen@importers.test","+45-555-2837","EMP034","Erik","Nielsen","Infrastructure Engineer","IT","Aarhus","Denmark","Cloud Migration" +"katerina.novak@importers.test","+420-555-9273","EMP035","Katerina","Novak","UI Developer","Engineering","Prague","Czech Republic","Design Systems" +"andrea.bianchi@importers.test","+41-555-8273","EMP036","Andrea","Bianchi","Product Manager","Product","Zurich","Switzerland","Product Strategy" +"lucas.mueller@importers.test","+49-555-2938","EMP037","Lucas","Mueller","Data Analyst","Analytics","Hamburg","Germany","Business Intelligence" +"claire.dubois@importers.test","+33-555-8472","EMP038","Claire","Dubois","Marketing Manager","Marketing","Nice","France","Growth Marketing" +"aleksandr.ivanov@importers.test","+7-555-9283","EMP039","Aleksandr","Ivanov","Support Engineer","Support","Novosibirsk","Russia","Technical Support" +"fatima.ali@importers.test","+971-555-7362","EMP040","Fatima","Ali","System Architect","Engineering","Abu Dhabi","UAE","Enterprise Architecture" +"david.wilson@importers.test","+64-555-2837","EMP041","David","Wilson","Software Engineer","Engineering","Auckland","New Zealand","Microservices" +"lisa.vanderberg@importers.test","+31-555-9273","EMP042","Lisa","Vanderberg","Product Analyst","Product","Amsterdam","Netherlands","Data Analysis" +"antonio.ferrari@importers.test","+39-555-8273","EMP043","Antonio","Ferrari","Network Engineer","Infrastructure","Turin","Italy","Network Security" +"sarah.brown@importers.test","+61-555-2938","EMP044","Sarah","Brown","Customer Success","Customer Service","Melbourne","Australia","Account Management" +"jan.kowalski@importers.test","+48-555-8472","EMP045","Jan","Kowalski","Backend Developer","Engineering","Warsaw","Poland","Golang Expert" +"marina.petrova@importers.test","+7-555-9283","EMP046","Marina","Petrova","HR Specialist","Human Resources","Kazan","Russia","Recruitment" +"carlos.garcia@importers.test","+52-555-7362","EMP047","Carlos","Garcia","Support Manager","Support","Mexico City","Mexico","Service Delivery" +"emma.larsson@importers.test","+46-555-2837","EMP048","Emma","Larsson","Frontend Lead","Engineering","Malmo","Sweden","Web Performance" +"hassan.ahmed@importers.test","+20-555-9273","EMP049","Hassan","Ahmed","Security Manager","Security","Alexandria","Egypt","InfoSec Strategy" +"akiko.saito@importers.test","+81-555-8273","EMP050","Akiko","Saito","UI/UX Designer","Design","Kyoto","Japan","Mobile Design" diff --git a/backend/testfiles/test.key b/backend/testfiles/test.key new file mode 100644 index 0000000..0b82635 --- /dev/null +++ b/backend/testfiles/test.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQD4qw9nGCmDiUY5 +CPIRPIGJiENhLO1nmC9ozxJ5ATnMEXkp85esv7zfc0Dix69wnpH5iRDedi1G5skE +XwwmnBF6+xB/QnHfxQE+jgAFiZpE/WKjTVpYeVmNHOIR35eC25OhG7qGMjwbHKBY +J/LonyZdkBnXQLwqFu2Q/TO5zIsT2FwldILdjvWDHo4N0/KrNrPojijRkkPmMi/z +8rK3Lw7rDOW04/9GZqfYKw4GCMRTyjvKBB3c1qj8EZMxjBaJ7i0stvnNAldDuDZR +01H+fIscA1LDqEzl8c/uO7YCPWlsXQ+OvC0hQXL1t7n9cScvkQsOIZ4h8F56fj8l +6qw6VQC/AgMBAAECggEAO9aaEcJoNv1hFxd2fLxVJZ3TLP6tMEkYW3284b5wRJGN +2REDli/biN/IpOnNZ2nZEbWwh//jHFUr2S/wEhhrQm9JBH6ePW1AQ+fSzIJUy7Y3 +trlF/NVSvdjN8+QcpwIkr9em1mUfPpzwXtZaJy0pA8nJ1hj/TpeO2l17xYGZlA/6 
+ig+cixph758eQYlypixmm8q9zmDMfywSJfBFb1r9Iws1bVAU42+waN+3goLo7xfS +kosdFPIthzqwBqWHMjPU7Q/gzFxCg/ydCBSyG+OWNzJK4kCefjdJwtqdtj3SuoXC +JHsDcuRGCfDbnTVz1wrDhnml/EtiXU4n1oCCHCAJKQKBgQD9qDwbFXMsJJ7xyh/2 +8JVuvH2JhCn0u1zsadgki7qQeuIkNljiWXQI6X8hXZke2SWvh5Dnz7gAYOYY2dPg +72Wumv+3Tsm2qPS4rtiLPfZ5vWj+34ZKegT0F0KZ2DUSd1UVDhnQ6ZxNEELzd6Qq +AelStngMvEZNzzSI+sMXRco8FwKBgQD69wd0HXpueSj4OJJ3tJMFdIGkTupYLVDK +YJ0ooZtlYdom8v9m25Xggw/hSpAa16tD/j6nwNI6CaOQdmxQt4Q3CGW3YcGHbnhh +kSC97ZlgB39OjMBdbPF+XFHamAzSB2guBYm+iHkRol4UwXi5ODqqseG1uiZ2gLNk +px6DlX0BmQKBgQCFXCX9t1FOLBLE3JfhpKtsBP2bhzjnynoYca2JyUzVGUY787bX +imBPLzP/vs36QgBmQMgWmEcNK+TNu/iBklvLaQJ4xZuXxUykrIh87J7hcov5NFvO +XEiubGw/pkSA6Ox6nWdFRw0+FwacKpLM4yRgsD59s57i2cWtHunjGnYxEQKBgQDX +FnCk677Kf7zFQHcHcROwUTHrCFeASxY63KLU9+ye70Zwq0KJfRju2mT5yVXIK571 +HynDzFZgt3Ifld8vLpl6MRA8IuQilmNF+4P8Q254BvTs9WTIrV4r1iOxHS+fsglv +p/9t9LkOjAHl+Arnc3GUae/wOCJO3e7iDPjx/WStwQKBgF8RA6Be9tTIb+icW2P7 +DXBu6HK/VOYnm+eu7hsG2O/GfSBx6Iypmhi1rrmvpTwxbPNGPccU4lRcXquBWCCv +o9t3DKDAmmUEPUtDGsccAyK3j1QysdOeqpSqta39D7YRavuBdyIOZz/K09lOjpmM +szbZqwTDP5yhPqZTSmv+HwWn +-----END PRIVATE KEY----- diff --git a/backend/testfiles/test.pem b/backend/testfiles/test.pem new file mode 100644 index 0000000..89e459e --- /dev/null +++ b/backend/testfiles/test.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDmTCCAoGgAwIBAgIUdKM52RZo5tB8i68IGJXPtDPSlCQwDQYJKoZIhvcNAQEL +BQAwXDELMAkGA1UEBhMCREsxEzARBgNVBAgMClNvbWUtU3RhdGUxFjAUBgNVBAoM +DVBoaXNoaW5nIENsdWIxIDAeBgNVBAMMF3Rlc3QucGhpc2hpbmcuY2x1Yi50ZXN0 +MB4XDTI1MDExNzExMTY1MFoXDTI2MDExNzExMTY1MFowXDELMAkGA1UEBhMCREsx +EzARBgNVBAgMClNvbWUtU3RhdGUxFjAUBgNVBAoMDVBoaXNoaW5nIENsdWIxIDAe +BgNVBAMMF3Rlc3QucGhpc2hpbmcuY2x1Yi50ZXN0MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEA+KsPZxgpg4lGOQjyETyBiYhDYSztZ5gvaM8SeQE5zBF5 +KfOXrL+833NA4sevcJ6R+YkQ3nYtRubJBF8MJpwRevsQf0Jx38UBPo4ABYmaRP1i +o01aWHlZjRziEd+XgtuToRu6hjI8GxygWCfy6J8mXZAZ10C8KhbtkP0zucyLE9hc +JXSC3Y71gx6ODdPyqzaz6I4o0ZJD5jIv8/Kyty8O6wzltOP/Rman2CsOBgjEU8o7 +ygQd3Nao/BGTMYwWie4tLLb5zQJXQ7g2UdNR/nyLHANSw6hM5fHP7ju2Aj1pbF0P +jrwtIUFy9be5/XEnL5ELDiGeIfBeen4/JeqsOlUAvwIDAQABo1MwUTAdBgNVHQ4E +FgQUB3yXKJftrXh07NBOb2YFCCXDpgEwHwYDVR0jBBgwFoAUB3yXKJftrXh07NBO +b2YFCCXDpgEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAu/TV +0lLwd5u+4oz4ruYD4SVYlxliMww7zIMeZZxMBgq8AjsMb2LsT7lcwpA3Ix4JN22j +mtcAL/DAuEz5Uz01w3gSvxmv3GbrJgiOVNdaFMEPZ/R43deAl/WJu9bjtM2eGtF8 +f6RJkMn9htrywn+7m5+g3dNa1oM0IeuzHkaLLHHFoT4L+eN1pykid/Tze+Gi0/In +HSk/Zck+OGpj0gr+75WKAH7NWfufy0p3rdYM4gwrN1wxaqGyxsJPjT9RMO+YWJJZ +BcEULHlyf+c340FCE8DdHCeCi2MMhicgS11If0ICPKw0ztxjLZFZMsLfPa54c0Vh +R7AIqYjwAahiechKvQ== +-----END CERTIFICATE----- diff --git a/backend/utils/crypt.go b/backend/utils/crypt.go new file mode 100644 index 0000000..9cdbc14 --- /dev/null +++ b/backend/utils/crypt.go @@ -0,0 +1,69 @@ +package utils + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "encoding/base64" + "io" + "strings" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/errs" +) + +func Encrypt(s string, secret string) (string, error) { + block, err := aes.NewCipher([]byte(secret)) + if err != nil { + return "", errs.Wrap(err) + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return "", errs.Wrap(err) + } + + nonce := make([]byte, gcm.NonceSize()) + if _, err = io.ReadFull(rand.Reader, nonce); err != nil { + return "", errs.Wrap(err) + } + + ciphertext := gcm.Seal(nonce, nonce, []byte(s), nil) + return base64.URLEncoding.EncodeToString(ciphertext), nil +} + +func Decrypt(s string, secret string) (string, error) { + block, err := 
aes.NewCipher([]byte(secret)) + if err != nil { + return "", errs.Wrap(err) + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return "", errs.Wrap(err) + } + + data, err := base64.URLEncoding.DecodeString(s) + if err != nil { + return "", errs.Wrap(err) + } + + nonceSize := gcm.NonceSize() + if len(data) < nonceSize { + return "", errs.Wrap(err) + } + + nonce, ciphertext := data[:nonceSize], data[nonceSize:] + plaintext, err := gcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + return "", errs.Wrap(err) + } + + return string(plaintext), nil +} + +// UUIDToSecret converts a UUIDv4 to a 32 char secret string by +// removing the '-' between the UUID parts +func UUIDToSecret(id *uuid.UUID) string { + return strings.ReplaceAll(id.String(), "-", "") +} diff --git a/backend/utils/csv.go b/backend/utils/csv.go new file mode 100644 index 0000000..78b1677 --- /dev/null +++ b/backend/utils/csv.go @@ -0,0 +1,23 @@ +package utils + +import ( + "strings" + "time" +) + +func CSVRemoveFormulaStart(input string) string { + if input == "" { + return input + } + if len(input) > 0 && strings.ContainsAny(input[0:1], "=@+-") { + return "'" + input + } + return input +} + +func CSVFromDate(d *time.Time) string { + if d == nil { + return "" + } + return CSVRemoveFormulaStart(d.Format(time.RFC3339)) +} diff --git a/backend/utils/json.go b/backend/utils/json.go new file mode 100644 index 0000000..a70b600 --- /dev/null +++ b/backend/utils/json.go @@ -0,0 +1,3639 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Represents JSON data structure using native Go types: booleans, floats, +// strings, arrays, and maps. + +package utils + +import ( + "bytes" + "encoding" + "encoding/base64" + "fmt" + "math" + "reflect" + "slices" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf16" + "unicode/utf8" + + "github.com/phishingclub/phishingclub/errs" +) + +// Unmarshal parses the JSON-encoded data and stores the result +// in the value pointed to by v. If v is nil or not a pointer, +// Unmarshal returns an [InvalidUnmarshalError]. +// +// Unmarshal uses the inverse of the encodings that +// [Marshal] uses, allocating maps, slices, and pointers as necessary, +// with the following additional rules: +// +// To unmarshal JSON into a pointer, Unmarshal first handles the case of +// the JSON being the JSON literal null. In that case, Unmarshal sets +// the pointer to nil. Otherwise, Unmarshal unmarshals the JSON into +// the value pointed at by the pointer. If the pointer is nil, Unmarshal +// allocates a new value for it to point to. +// +// To unmarshal JSON into a value implementing [Unmarshaler], +// Unmarshal calls that value's [Unmarshaler.UnmarshalJSON] method, including +// when the input is a JSON null. +// Otherwise, if the value implements [encoding.TextUnmarshaler] +// and the input is a JSON quoted string, Unmarshal calls +// [encoding.TextUnmarshaler.UnmarshalText] with the unquoted form of the string. +// +// To unmarshal JSON into a struct, Unmarshal matches incoming object +// keys to the keys used by [Marshal] (either the struct field name or its tag), +// preferring an exact match but also accepting a case-insensitive match. By +// default, object keys which don't have a corresponding struct field are +// ignored (see [Decoder.DisallowUnknownFields] for an alternative). 
+// +// To unmarshal JSON into an interface value, +// Unmarshal stores one of these in the interface value: +// +// - bool, for JSON booleans +// - float64, for JSON numbers +// - string, for JSON strings +// - []interface{}, for JSON arrays +// - map[string]interface{}, for JSON objects +// - nil for JSON null +// +// To unmarshal a JSON array into a slice, Unmarshal resets the slice length +// to zero and then appends each element to the slice. +// As a special case, to unmarshal an empty JSON array into a slice, +// Unmarshal replaces the slice with a new empty slice. +// +// To unmarshal a JSON array into a Go array, Unmarshal decodes +// JSON array elements into corresponding Go array elements. +// If the Go array is smaller than the JSON array, +// the additional JSON array elements are discarded. +// If the JSON array is smaller than the Go array, +// the additional Go array elements are set to zero values. +// +// To unmarshal a JSON object into a map, Unmarshal first establishes a map to +// use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal +// reuses the existing map, keeping existing entries. Unmarshal then stores +// key-value pairs from the JSON object into the map. The map's key type must +// either be any string type, an integer, implement [json.Unmarshaler], or +// implement [encoding.TextUnmarshaler]. +// +// If the JSON-encoded data contain a syntax error, Unmarshal returns a [SyntaxError]. +// +// If a JSON value is not appropriate for a given target type, +// or if a JSON number overflows the target type, Unmarshal +// skips that field and completes the unmarshaling as best it can. +// If no more serious errors are encountered, Unmarshal returns +// an [UnmarshalTypeError] describing the earliest such error. In any +// case, it's not guaranteed that all the remaining fields following +// the problematic one will be unmarshaled into the target object. +// +// The JSON null value unmarshals into an interface, map, pointer, or slice +// by setting that Go value to nil. Because null is often used in JSON to mean +// “not present,” unmarshaling a JSON null into any other Go type has no effect +// on the value and produces no error. +// +// When unmarshaling quoted strings, invalid UTF-8 or +// invalid UTF-16 surrogate pairs are not treated as an error. +// Instead, they are replaced by the Unicode replacement +// character U+FFFD. +func Unmarshal(data []byte, v any) error { + // Check for well-formedness. + // Avoids filling out half a data structure + // before discovering a JSON syntax error. + var d decodeState + err := checkValid(data, &d.scan) + if err != nil { + return err + } + + d.init(data) + return d.unmarshal(v) +} + +// Unmarshaler is the interface implemented by types +// that can unmarshal a JSON description of themselves. +// The input can be assumed to be a valid encoding of +// a JSON value. UnmarshalJSON must copy the JSON data +// if it wishes to retain the data after returning. +// +// By convention, to approximate the behavior of [Unmarshal] itself, +// Unmarshalers implement UnmarshalJSON([]byte("null")) as a no-op. +type Unmarshaler interface { + UnmarshalJSON([]byte) error +} + +// An UnmarshalTypeError describes a JSON value that was +// not appropriate for a value of a specific Go type. 
+type UnmarshalTypeError struct { + Value string // description of JSON value - "bool", "array", "number -5" + Type reflect.Type // type of Go value it could not be assigned to + Offset int64 // error occurred after reading Offset bytes + Struct string // name of the struct type containing the field + Field string // the full path from root node to the field +} + +func (e *UnmarshalTypeError) Error() string { + if e.Struct != "" || e.Field != "" { + return "json: cannot unmarshal " + e.Value + " into Go struct field " + e.Struct + "." + e.Field + " of type " + e.Type.String() + } + if e.Field != "" { + return "json: cannot unmarshal " + e.Value + " into Go struct field " + e.Field + " of type " + e.Type.String() + } + return "json: cannot unmarshal " + e.Value + " into Go value of type " + e.Type.String() +} + +// An UnmarshalFieldError describes a JSON object key that +// led to an unexported (and therefore unwritable) struct field. +// +// Deprecated: No longer used; kept for compatibility. +type UnmarshalFieldError struct { + Key string + Type reflect.Type + Field reflect.StructField +} + +func (e *UnmarshalFieldError) Error() string { + return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String() +} + +// An InvalidUnmarshalError describes an invalid argument passed to [Unmarshal]. +// (The argument to [Unmarshal] must be a non-nil pointer.) +type InvalidUnmarshalError struct { + Type reflect.Type +} + +func (e *InvalidUnmarshalError) Error() string { + if e.Type == nil { + return "json: Unmarshal(nil)" + } + + if e.Type.Kind() != reflect.Pointer { + return "json: Unmarshal(non-pointer " + e.Type.String() + ")" + } + return "json: Unmarshal(nil " + e.Type.String() + ")" +} + +func (d *decodeState) unmarshal(v any) error { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Pointer || rv.IsNil() { + return &InvalidUnmarshalError{reflect.TypeOf(v)} + } + + d.scan.reset() + d.scanWhile(scanSkipSpace) + // We decode rv not rv.Elem because the Unmarshaler interface + // test must be applied at the top level of the value. + err := d.value(rv) + if err != nil { + return d.addErrorContext(err) + } + return d.savedError +} + +// A Number represents a JSON number literal. +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. +func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +// An errorContext provides context for type errors during decoding. +type errorContext struct { + Struct reflect.Type + FieldStack []string +} + +// decodeState represents the state while decoding a JSON value. +type decodeState struct { + data []byte + off int // next read offset in data + opcode int // last read result + scan scanner + errorContext *errorContext + savedError error + useNumber bool + disallowUnknownFields bool +} + +// readIndex returns the position of the last byte read. +func (d *decodeState) readIndex() int { + return d.off - 1 +} + +// phasePanicMsg is used as a panic message when we end up with something that +// shouldn't happen. It can indicate a bug in the JSON decoder, or that +// something is editing the data slice while the decoder executes. +const phasePanicMsg = "JSON decoder out of sync - data changing underfoot?" 
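// Example: a minimal sketch of how the vendored Unmarshal above behaves under
// the rules documented in its comment (struct targets, interface{} targets and
// type errors). exampleUnmarshalUsage and campaignStats are hypothetical
// illustrations, not part of this package; only Unmarshal and
// UnmarshalTypeError from this file are used.
func exampleUnmarshalUsage() {
	type campaignStats struct {
		Name   string  `json:"name"`
		Opened int     `json:"opened"`
		Rate   float64 `json:"rate"`
	}

	data := []byte(`{"Name":"Q3 awareness","opened":12,"rate":3.5}`)

	// Struct target: keys match field tags or names, preferring an exact
	// match but accepting a case-insensitive one, so "Name" still fills
	// the field tagged `json:"name"`.
	var s campaignStats
	if err := Unmarshal(data, &s); err != nil {
		fmt.Println("unexpected error:", err)
	}

	// Interface target: the object decodes to map[string]interface{} and
	// numbers to float64 (or to Number when useNumber is set).
	var v any
	_ = Unmarshal(data, &v)

	// A mismatched value produces an *UnmarshalTypeError while the rest
	// of the object is still decoded as far as possible.
	err := Unmarshal([]byte(`{"name":"x","opened":"twelve"}`), &s)
	if typeErr, ok := err.(*UnmarshalTypeError); ok {
		fmt.Println("cannot unmarshal", typeErr.Value, "into", typeErr.Type)
	}
}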
+ +func (d *decodeState) init(data []byte) *decodeState { + d.data = data + d.off = 0 + d.savedError = nil + if d.errorContext != nil { + d.errorContext.Struct = nil + // Reuse the allocated space for the FieldStack slice. + d.errorContext.FieldStack = d.errorContext.FieldStack[:0] + } + return d +} + +// saveError saves the first err it is called with, +// for reporting at the end of the unmarshal. +func (d *decodeState) saveError(err error) { + if d.savedError == nil { + d.savedError = d.addErrorContext(err) + } +} + +// addErrorContext returns a new error enhanced with information from d.errorContext +func (d *decodeState) addErrorContext(err error) error { + if d.errorContext != nil && (d.errorContext.Struct != nil || len(d.errorContext.FieldStack) > 0) { + switch err := err.(type) { + case *UnmarshalTypeError: + err.Struct = d.errorContext.Struct.Name() + err.Field = strings.Join(d.errorContext.FieldStack, ".") + default: + if d.errorContext.FieldStack != nil { + field := strings.Join(d.errorContext.FieldStack, ".") + return fmt.Errorf("%s: %v", field, err) + } + } + } + return err +} + +// skip scans to the end of what was started. +func (d *decodeState) skip() { + s, data, i := &d.scan, d.data, d.off + depth := len(s.parseState) + for { + op := s.step(s, data[i]) + i++ + if len(s.parseState) < depth { + d.off = i + d.opcode = op + return + } + } +} + +// scanNext processes the byte at d.data[d.off]. +func (d *decodeState) scanNext() { + if d.off < len(d.data) { + d.opcode = d.scan.step(&d.scan, d.data[d.off]) + d.off++ + } else { + d.opcode = d.scan.eof() + d.off = len(d.data) + 1 // mark processed EOF with len+1 + } +} + +// scanWhile processes bytes in d.data[d.off:] until it +// receives a scan code not equal to op. +func (d *decodeState) scanWhile(op int) { + s, data, i := &d.scan, d.data, d.off + for i < len(data) { + newOp := s.step(s, data[i]) + i++ + if newOp != op { + d.opcode = newOp + d.off = i + return + } + } + + d.off = len(data) + 1 // mark processed EOF with len+1 + d.opcode = d.scan.eof() +} + +// rescanLiteral is similar to scanWhile(scanContinue), but it specialises the +// common case where we're decoding a literal. The decoder scans the input +// twice, once for syntax errors and to check the length of the value, and the +// second to perform the decoding. +// +// Only in the second step do we use decodeState to tokenize literals, so we +// know there aren't any syntax errors. We can take advantage of that knowledge, +// and scan a literal's bytes much more quickly. +func (d *decodeState) rescanLiteral() { + data, i := d.data, d.off +Switch: + switch data[i-1] { + case '"': // string + for ; i < len(data); i++ { + switch data[i] { + case '\\': + i++ // escaped char + case '"': + i++ // tokenize the closing quote too + break Switch + } + } + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-': // number + for ; i < len(data); i++ { + switch data[i] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', + '.', 'e', 'E', '+', '-': + default: + break Switch + } + } + case 't': // true + i += len("rue") + case 'f': // false + i += len("alse") + case 'n': // null + i += len("ull") + } + if i < len(data) { + d.opcode = stateEndValue(&d.scan, data[i]) + } else { + d.opcode = scanEnd + } + d.off = i + 1 +} + +// value consumes a JSON value from d.data[d.off-1:], decoding into v, and +// reads the following byte ahead. If v is invalid, the value is discarded. +// The first byte of the value has been read already. 
+func (d *decodeState) value(v reflect.Value) error { + switch d.opcode { + default: + panic(phasePanicMsg) + + case scanBeginArray: + if v.IsValid() { + if err := d.array(v); err != nil { + return err + } + } else { + d.skip() + } + d.scanNext() + + case scanBeginObject: + if v.IsValid() { + if err := d.object(v); err != nil { + return err + } + } else { + d.skip() + } + d.scanNext() + + case scanBeginLiteral: + // All bytes inside literal return scanContinue op code. + start := d.readIndex() + d.rescanLiteral() + + if v.IsValid() { + if err := d.literalStore(d.data[start:d.readIndex()], v, false); err != nil { + return err + } + } + } + return nil +} + +type unquotedValue struct{} + +// valueQuoted is like value but decodes a +// quoted string literal or literal null into an interface value. +// If it finds anything other than a quoted string literal or null, +// valueQuoted returns unquotedValue{}. +func (d *decodeState) valueQuoted() any { + switch d.opcode { + default: + panic(phasePanicMsg) + + case scanBeginArray, scanBeginObject: + d.skip() + d.scanNext() + + case scanBeginLiteral: + v := d.literalInterface() + switch v.(type) { + case nil, string: + return v + } + } + return unquotedValue{} +} + +// indirect walks down v allocating pointers as needed, +// until it gets to a non-pointer. +// If it encounters an Unmarshaler, indirect stops and returns that. +// If decodingNull is true, indirect stops at the first settable pointer so it +// can be set to nil. +func indirect(v reflect.Value, decodingNull bool) (Unmarshaler, encoding.TextUnmarshaler, reflect.Value) { + // Issue #24153 indicates that it is generally not a guaranteed property + // that you may round-trip a reflect.Value by calling Value.Addr().Elem() + // and expect the value to still be settable for values derived from + // unexported embedded struct fields. + // + // The logic below effectively does this when it first addresses the value + // (to satisfy possible pointer methods) and continues to dereference + // subsequent pointers as necessary. + // + // After the first round-trip, we set v back to the original value to + // preserve the original RW flags contained in reflect.Value. + v0 := v + haveAddr := false + + // If v is a named type and is addressable, + // start with its address, so that if the type has pointer methods, + // we find them. + if v.Kind() != reflect.Pointer && v.Type().Name() != "" && v.CanAddr() { + haveAddr = true + v = v.Addr() + } + for { + // Load value from interface, but only if the result will be + // usefully addressable. 
+ if v.Kind() == reflect.Interface && !v.IsNil() { + e := v.Elem() + if e.Kind() == reflect.Pointer && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Pointer) { + haveAddr = false + v = e + continue + } + } + + if v.Kind() != reflect.Pointer { + break + } + + if decodingNull && v.CanSet() { + break + } + + // Prevent infinite loop if v is an interface pointing to its own address: + // var v interface{} + // v = &v + if v.Elem().Kind() == reflect.Interface && v.Elem().Elem() == v { + v = v.Elem() + break + } + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + if v.Type().NumMethod() > 0 && v.CanInterface() { + if u, ok := v.Interface().(Unmarshaler); ok { + return u, nil, reflect.Value{} + } + if !decodingNull { + if u, ok := v.Interface().(encoding.TextUnmarshaler); ok { + return nil, u, reflect.Value{} + } + } + } + + if haveAddr { + v = v0 // restore original value after round-trip Value.Addr().Elem() + haveAddr = false + } else { + v = v.Elem() + } + } + return nil, nil, v +} + +// array consumes an array from d.data[d.off-1:], decoding into v. +// The first byte of the array ('[') has been read already. +func (d *decodeState) array(v reflect.Value) error { + // Check for unmarshaler. + u, ut, pv := indirect(v, false) + if u != nil { + start := d.readIndex() + d.skip() + return u.UnmarshalJSON(d.data[start:d.off]) + } + if ut != nil { + d.saveError(&UnmarshalTypeError{Value: "array", Type: v.Type(), Offset: int64(d.off)}) + d.skip() + return nil + } + v = pv + + // Check type of target. + switch v.Kind() { + case reflect.Interface: + if v.NumMethod() == 0 { + // Decoding into nil interface? Switch to non-reflect code. + ai := d.arrayInterface() + v.Set(reflect.ValueOf(ai)) + return nil + } + // Otherwise it's invalid. + fallthrough + default: + d.saveError(&UnmarshalTypeError{Value: "array", Type: v.Type(), Offset: int64(d.off)}) + d.skip() + return nil + case reflect.Array, reflect.Slice: + break + } + + i := 0 + for { + // Look ahead for ] - can only happen on first iteration. + d.scanWhile(scanSkipSpace) + if d.opcode == scanEndArray { + break + } + + // Expand slice length, growing the slice if necessary. + if v.Kind() == reflect.Slice { + if i >= v.Cap() { + v.Grow(1) + } + if i >= v.Len() { + v.SetLen(i + 1) + } + } + + if i < v.Len() { + // Decode into element. + if err := d.value(v.Index(i)); err != nil { + return err + } + } else { + // Ran out of fixed array: skip. + if err := d.value(reflect.Value{}); err != nil { + return err + } + } + i++ + + // Next token must be , or ]. + if d.opcode == scanSkipSpace { + d.scanWhile(scanSkipSpace) + } + if d.opcode == scanEndArray { + break + } + if d.opcode != scanArrayValue { + panic(phasePanicMsg) + } + } + + if i < v.Len() { + if v.Kind() == reflect.Array { + for ; i < v.Len(); i++ { + v.Index(i).SetZero() // zero remainder of array + } + } else { + v.SetLen(i) // truncate the slice + } + } + if i == 0 && v.Kind() == reflect.Slice { + v.Set(reflect.MakeSlice(v.Type(), 0, 0)) + } + return nil +} + +var nullLiteral = []byte("null") +var textUnmarshalerType = reflect.TypeFor[encoding.TextUnmarshaler]() + +// object consumes an object from d.data[d.off-1:], decoding into v. +// The first byte ('{') of the object has been read already. +func (d *decodeState) object(v reflect.Value) error { + // Check for unmarshaler. 
+ u, ut, pv := indirect(v, false) + if u != nil { + start := d.readIndex() + d.skip() + return u.UnmarshalJSON(d.data[start:d.off]) + } + if ut != nil { + d.saveError(&UnmarshalTypeError{Value: "object", Type: v.Type(), Offset: int64(d.off)}) + d.skip() + return nil + } + v = pv + t := v.Type() + + // Decoding into nil interface? Switch to non-reflect code. + if v.Kind() == reflect.Interface && v.NumMethod() == 0 { + oi := d.objectInterface() + v.Set(reflect.ValueOf(oi)) + return nil + } + + var fields structFields + + // Check type of target: + // struct or + // map[T1]T2 where T1 is string, an integer type, + // or an encoding.TextUnmarshaler + switch v.Kind() { + case reflect.Map: + // Map key must either have string kind, have an integer kind, + // or be an encoding.TextUnmarshaler. + switch t.Key().Kind() { + case reflect.String, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + default: + if !reflect.PointerTo(t.Key()).Implements(textUnmarshalerType) { + d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)}) + d.skip() + return nil + } + } + if v.IsNil() { + v.Set(reflect.MakeMap(t)) + } + case reflect.Struct: + fields = cachedTypeFields(t) + // ok + default: + d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)}) + d.skip() + return nil + } + + var mapElem reflect.Value + var origErrorContext errorContext + if d.errorContext != nil { + origErrorContext = *d.errorContext + } + + for { + // Read opening " of string key or closing }. + d.scanWhile(scanSkipSpace) + if d.opcode == scanEndObject { + // closing } - can only happen on first iteration. + break + } + if d.opcode != scanBeginLiteral { + panic(phasePanicMsg) + } + + // Read key. + start := d.readIndex() + d.rescanLiteral() + item := d.data[start:d.readIndex()] + key, ok := unquoteBytes(item) + if !ok { + panic(phasePanicMsg) + } + + // Figure out field corresponding to key. + var subv reflect.Value + destring := false // whether the value is wrapped in a string to be decoded first + + if v.Kind() == reflect.Map { + elemType := t.Elem() + if !mapElem.IsValid() { + mapElem = reflect.New(elemType).Elem() + } else { + mapElem.SetZero() + } + subv = mapElem + } else { + f := fields.byExactName[string(key)] + if f == nil { + f = fields.byFoldedName[string(foldName(key))] + } + if f != nil { + subv = v + destring = f.quoted + for _, i := range f.index { + if subv.Kind() == reflect.Pointer { + if subv.IsNil() { + // If a struct embeds a pointer to an unexported type, + // it is not possible to set a newly allocated value + // since the field is unexported. + // + // See https://golang.org/issue/21357 + if !subv.CanSet() { + d.saveError(fmt.Errorf("json: cannot set embedded pointer to unexported struct: %v", subv.Type().Elem())) + // Invalidate subv to ensure d.value(subv) skips over + // the JSON value without assigning it to subv. + subv = reflect.Value{} + destring = false + break + } + subv.Set(reflect.New(subv.Type().Elem())) + } + subv = subv.Elem() + } + subv = subv.Field(i) + } + if d.errorContext == nil { + d.errorContext = new(errorContext) + } + d.errorContext.FieldStack = append(d.errorContext.FieldStack, f.name) + d.errorContext.Struct = t + } else if d.disallowUnknownFields { + d.saveError(fmt.Errorf("json: unknown field %q", key)) + } + } + + // Read : before value. 
+ if d.opcode == scanSkipSpace { + d.scanWhile(scanSkipSpace) + } + if d.opcode != scanObjectKey { + panic(phasePanicMsg) + } + d.scanWhile(scanSkipSpace) + + if destring { + switch qv := d.valueQuoted().(type) { + case nil: + if err := d.literalStore(nullLiteral, subv, false); err != nil { + return err + } + case string: + if err := d.literalStore([]byte(qv), subv, true); err != nil { + return err + } + default: + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type())) + } + } else { + if err := d.value(subv); err != nil { + return err + } + } + + // Write value back to map; + // if using struct, subv points into struct already. + if v.Kind() == reflect.Map { + kt := t.Key() + var kv reflect.Value + if reflect.PointerTo(kt).Implements(textUnmarshalerType) { + kv = reflect.New(kt) + if err := d.literalStore(item, kv, true); err != nil { + return err + } + kv = kv.Elem() + } else { + switch kt.Kind() { + case reflect.String: + kv = reflect.New(kt).Elem() + kv.SetString(string(key)) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + s := string(key) + n, err := strconv.ParseInt(s, 10, 64) + if err != nil || reflect.Zero(kt).OverflowInt(n) { + d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)}) + break + } + kv = reflect.New(kt).Elem() + kv.SetInt(n) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + s := string(key) + n, err := strconv.ParseUint(s, 10, 64) + if err != nil || reflect.Zero(kt).OverflowUint(n) { + d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)}) + break + } + kv = reflect.New(kt).Elem() + kv.SetUint(n) + default: + panic("json: Unexpected key type") // should never occur + } + } + if kv.IsValid() { + v.SetMapIndex(kv, subv) + } + } + + // Next token must be , or }. + if d.opcode == scanSkipSpace { + d.scanWhile(scanSkipSpace) + } + if d.errorContext != nil { + // Reset errorContext to its original state. + // Keep the same underlying array for FieldStack, to reuse the + // space and avoid unnecessary allocs. + d.errorContext.FieldStack = d.errorContext.FieldStack[:len(origErrorContext.FieldStack)] + d.errorContext.Struct = origErrorContext.Struct + } + if d.opcode == scanEndObject { + break + } + if d.opcode != scanObjectValue { + panic(phasePanicMsg) + } + } + return nil +} + +// convertNumber converts the number literal s to a float64 or a Number +// depending on the setting of d.useNumber. +func (d *decodeState) convertNumber(s string) (any, error) { + if d.useNumber { + return Number(s), nil + } + f, err := strconv.ParseFloat(s, 64) + if err != nil { + return nil, &UnmarshalTypeError{Value: "number " + s, Type: reflect.TypeFor[float64](), Offset: int64(d.off)} + } + return f, nil +} + +var numberType = reflect.TypeFor[Number]() + +// literalStore decodes a literal stored in item into v. +// +// fromQuoted indicates whether this literal came from unwrapping a +// string from the ",string" struct tag option. this is used only to +// produce more helpful error messages. +func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) error { + // Check for unmarshaler. + if len(item) == 0 { + // Empty string given. 
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + return nil + } + isNull := item[0] == 'n' // null + u, ut, pv := indirect(v, isNull) + if u != nil { + return u.UnmarshalJSON(item) + } + if ut != nil { + if item[0] != '"' { + if fromQuoted { + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + return nil + } + val := "number" + switch item[0] { + case 'n': + val = "null" + case 't', 'f': + val = "bool" + } + d.saveError(&UnmarshalTypeError{Value: val, Type: v.Type(), Offset: int64(d.readIndex())}) + return nil + } + s, ok := unquoteBytes(item) + if !ok { + if fromQuoted { + return fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()) + } + panic(phasePanicMsg) + } + return ut.UnmarshalText(s) + } + + v = pv + + switch c := item[0]; c { + case 'n': // null + // The main parser checks that only true and false can reach here, + // but if this was a quoted string input, it could be anything. + if fromQuoted && string(item) != "null" { + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + break + } + switch v.Kind() { + case reflect.Interface, reflect.Pointer, reflect.Map, reflect.Slice: + v.SetZero() + // otherwise, ignore null for primitives/string + } + case 't', 'f': // true, false + value := item[0] == 't' + // The main parser checks that only true and false can reach here, + // but if this was a quoted string input, it could be anything. + if fromQuoted && string(item) != "true" && string(item) != "false" { + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + break + } + switch v.Kind() { + default: + if fromQuoted { + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + } else { + d.saveError(&UnmarshalTypeError{Value: "bool", Type: v.Type(), Offset: int64(d.readIndex())}) + } + case reflect.Bool: + v.SetBool(value) + case reflect.Interface: + if v.NumMethod() == 0 { + v.Set(reflect.ValueOf(value)) + } else { + d.saveError(&UnmarshalTypeError{Value: "bool", Type: v.Type(), Offset: int64(d.readIndex())}) + } + } + + case '"': // string + s, ok := unquoteBytes(item) + if !ok { + if fromQuoted { + return fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()) + } + panic(phasePanicMsg) + } + switch v.Kind() { + default: + d.saveError(&UnmarshalTypeError{Value: "string", Type: v.Type(), Offset: int64(d.readIndex())}) + case reflect.Slice: + if v.Type().Elem().Kind() != reflect.Uint8 { + d.saveError(&UnmarshalTypeError{Value: "string", Type: v.Type(), Offset: int64(d.readIndex())}) + break + } + b := make([]byte, base64.StdEncoding.DecodedLen(len(s))) + n, err := base64.StdEncoding.Decode(b, s) + if err != nil { + d.saveError(err) + break + } + v.SetBytes(b[:n]) + case reflect.String: + if v.Type() == numberType && !isValidNumber(string(s)) { + return fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item) + } + v.SetString(string(s)) + case reflect.Interface: + if v.NumMethod() == 0 { + v.Set(reflect.ValueOf(string(s))) + } else { + d.saveError(&UnmarshalTypeError{Value: "string", Type: v.Type(), Offset: int64(d.readIndex())}) + } + } + + default: // number + if c != '-' && (c < '0' || c > '9') { + if fromQuoted { + return fmt.Errorf("json: 
invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()) + } + panic(phasePanicMsg) + } + switch v.Kind() { + default: + if v.Kind() == reflect.String && v.Type() == numberType { + // s must be a valid number, because it's + // already been tokenized. + v.SetString(string(item)) + break + } + if fromQuoted { + return fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()) + } + d.saveError(&UnmarshalTypeError{Value: "number", Type: v.Type(), Offset: int64(d.readIndex())}) + case reflect.Interface: + n, err := d.convertNumber(string(item)) + if err != nil { + d.saveError(err) + break + } + if v.NumMethod() != 0 { + d.saveError(&UnmarshalTypeError{Value: "number", Type: v.Type(), Offset: int64(d.readIndex())}) + break + } + v.Set(reflect.ValueOf(n)) + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + n, err := strconv.ParseInt(string(item), 10, 64) + if err != nil || v.OverflowInt(n) { + d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())}) + break + } + v.SetInt(n) + + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + n, err := strconv.ParseUint(string(item), 10, 64) + if err != nil || v.OverflowUint(n) { + d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())}) + break + } + v.SetUint(n) + + case reflect.Float32, reflect.Float64: + n, err := strconv.ParseFloat(string(item), v.Type().Bits()) + if err != nil || v.OverflowFloat(n) { + d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())}) + break + } + v.SetFloat(n) + } + } + return nil +} + +// The xxxInterface routines build up a value to be stored +// in an empty interface. They are not strictly necessary, +// but they avoid the weight of reflection in this common case. + +// valueInterface is like value but returns interface{} +func (d *decodeState) valueInterface() (val any) { + switch d.opcode { + default: + panic(phasePanicMsg) + case scanBeginArray: + val = d.arrayInterface() + d.scanNext() + case scanBeginObject: + val = d.objectInterface() + d.scanNext() + case scanBeginLiteral: + val = d.literalInterface() + } + return +} + +// arrayInterface is like array but returns []interface{}. +func (d *decodeState) arrayInterface() []any { + var v = make([]any, 0) + for { + // Look ahead for ] - can only happen on first iteration. + d.scanWhile(scanSkipSpace) + if d.opcode == scanEndArray { + break + } + + v = append(v, d.valueInterface()) + + // Next token must be , or ]. + if d.opcode == scanSkipSpace { + d.scanWhile(scanSkipSpace) + } + if d.opcode == scanEndArray { + break + } + if d.opcode != scanArrayValue { + panic(phasePanicMsg) + } + } + return v +} + +// objectInterface is like object but returns map[string]interface{}. +func (d *decodeState) objectInterface() map[string]any { + m := make(map[string]any) + for { + // Read opening " of string key or closing }. + d.scanWhile(scanSkipSpace) + if d.opcode == scanEndObject { + // closing } - can only happen on first iteration. + break + } + if d.opcode != scanBeginLiteral { + panic(phasePanicMsg) + } + + // Read string key. + start := d.readIndex() + d.rescanLiteral() + item := d.data[start:d.readIndex()] + key, ok := unquote(item) + if !ok { + panic(phasePanicMsg) + } + + // Read : before value. 
+ if d.opcode == scanSkipSpace { + d.scanWhile(scanSkipSpace) + } + if d.opcode != scanObjectKey { + panic(phasePanicMsg) + } + d.scanWhile(scanSkipSpace) + + // Read value. + m[key] = d.valueInterface() + + // Next token must be , or }. + if d.opcode == scanSkipSpace { + d.scanWhile(scanSkipSpace) + } + if d.opcode == scanEndObject { + break + } + if d.opcode != scanObjectValue { + panic(phasePanicMsg) + } + } + return m +} + +// literalInterface consumes and returns a literal from d.data[d.off-1:] and +// it reads the following byte ahead. The first byte of the literal has been +// read already (that's how the caller knows it's a literal). +func (d *decodeState) literalInterface() any { + // All bytes inside literal return scanContinue op code. + start := d.readIndex() + d.rescanLiteral() + + item := d.data[start:d.readIndex()] + + switch c := item[0]; c { + case 'n': // null + return nil + + case 't', 'f': // true, false + return c == 't' + + case '"': // string + s, ok := unquote(item) + if !ok { + panic(phasePanicMsg) + } + return s + + default: // number + if c != '-' && (c < '0' || c > '9') { + panic(phasePanicMsg) + } + n, err := d.convertNumber(string(item)) + if err != nil { + d.saveError(err) + } + return n + } +} + +// getu4 decodes \uXXXX from the beginning of s, returning the hex value, +// or it returns -1. +func getu4(s []byte) rune { + if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { + return -1 + } + var r rune + for _, c := range s[2:6] { + switch { + case '0' <= c && c <= '9': + c = c - '0' + case 'a' <= c && c <= 'f': + c = c - 'a' + 10 + case 'A' <= c && c <= 'F': + c = c - 'A' + 10 + default: + return -1 + } + r = r*16 + rune(c) + } + return r +} + +// unquote converts a quoted JSON string literal s into an actual string t. +// The rules are different than for Go, so cannot use strconv.Unquote. +func unquote(s []byte) (t string, ok bool) { + s, ok = unquoteBytes(s) + t = string(s) + return +} + +func unquoteBytes(s []byte) (t []byte, ok bool) { + if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { + return + } + s = s[1 : len(s)-1] + + // Check for unusual characters. If there are none, + // then no unquoting is needed, so return a slice of the + // original bytes. + r := 0 + for r < len(s) { + c := s[r] + if c == '\\' || c == '"' || c < ' ' { + break + } + if c < utf8.RuneSelf { + r++ + continue + } + rr, size := utf8.DecodeRune(s[r:]) + if rr == utf8.RuneError && size == 1 { + break + } + r += size + } + if r == len(s) { + return s, true + } + + b := make([]byte, len(s)+2*utf8.UTFMax) + w := copy(b, s[0:r]) + for r < len(s) { + // Out of room? Can only happen if s is full of + // malformed UTF-8 and we're replacing each + // byte with RuneError. + if w >= len(b)-2*utf8.UTFMax { + nb := make([]byte, (len(b)+utf8.UTFMax)*2) + copy(nb, b[0:w]) + b = nb + } + switch c := s[r]; { + case c == '\\': + r++ + if r >= len(s) { + return + } + switch s[r] { + default: + return + case '"', '\\', '/', '\'': + b[w] = s[r] + r++ + w++ + case 'b': + b[w] = '\b' + r++ + w++ + case 'f': + b[w] = '\f' + r++ + w++ + case 'n': + b[w] = '\n' + r++ + w++ + case 'r': + b[w] = '\r' + r++ + w++ + case 't': + b[w] = '\t' + r++ + w++ + case 'u': + r-- + rr := getu4(s[r:]) + if rr < 0 { + return + } + r += 6 + if utf16.IsSurrogate(rr) { + rr1 := getu4(s[r:]) + if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { + // A valid pair; consume. + r += 6 + w += utf8.EncodeRune(b[w:], dec) + break + } + // Invalid surrogate; fall back to replacement rune. 
+ rr = unicode.ReplacementChar + } + w += utf8.EncodeRune(b[w:], rr) + } + + // Quote, control characters are invalid. + case c == '"', c < ' ': + return + + // ASCII + case c < utf8.RuneSelf: + b[w] = c + r++ + w++ + + // Coerce to well-formed UTF-8. + default: + rr, size := utf8.DecodeRune(s[r:]) + r += size + w += utf8.EncodeRune(b[w:], rr) + } + } + return b[0:w], true +} + +// Valid reports whether data is a valid JSON encoding. +func Valid(data []byte) bool { + scan := newScanner() + defer freeScanner(scan) + return checkValid(data, scan) == nil +} + +// checkValid verifies that data is valid JSON-encoded data. +// scan is passed in for use by checkValid to avoid an allocation. +// checkValid returns nil or a SyntaxError. +func checkValid(data []byte, scan *scanner) error { + scan.reset() + for _, c := range data { + scan.bytes++ + if scan.step(scan, c) == scanError { + return scan.err + } + } + if scan.eof() == scanError { + return scan.err + } + return nil +} + +// A SyntaxError is a description of a JSON syntax error. +// [Unmarshal] will return a SyntaxError if the JSON can't be parsed. +type SyntaxError struct { + msg string // description of error + Offset int64 // error occurred after reading Offset bytes +} + +func (e *SyntaxError) Error() string { return e.msg } + +// A scanner is a JSON scanning state machine. +// Callers call scan.reset and then pass bytes in one at a time +// by calling scan.step(&scan, c) for each byte. +// The return value, referred to as an opcode, tells the +// caller about significant parsing events like beginning +// and ending literals, objects, and arrays, so that the +// caller can follow along if it wishes. +// The return value scanEnd indicates that a single top-level +// JSON value has been completed, *before* the byte that +// just got passed in. (The indication must be delayed in order +// to recognize the end of numbers: is 123 a whole value or +// the beginning of 12345e+6?). +type scanner struct { + // The step is a func to be called to execute the next transition. + // Also tried using an integer constant and a single func + // with a switch, but using the func directly was 10% faster + // on a 64-bit Mac Mini, and it's nicer to read. + step func(*scanner, byte) int + + // Reached end of top-level value. + endTop bool + + // Stack of what we're in the middle of - array values, object keys, object values. + parseState []int + + // Error that happened, if any. + err error + + // total bytes consumed, updated by decoder.Decode (and deliberately + // not set to zero by scan.reset) + bytes int64 +} + +var scannerPool = sync.Pool{ + New: func() any { + return &scanner{} + }, +} + +func newScanner() *scanner { + scan := scannerPool.Get().(*scanner) + // scan.reset by design doesn't set bytes to zero + scan.bytes = 0 + scan.reset() + return scan +} + +func freeScanner(scan *scanner) { + // Avoid hanging on to too much memory in extreme cases. + if len(scan.parseState) > 1024 { + scan.parseState = nil + } + scannerPool.Put(scan) +} + +// These values are returned by the state transition functions +// assigned to scanner.state and the method scanner.eof. +// They give details about the current state of the scan that +// callers might be interested to know about. +// It is okay to ignore the return value of any particular +// call to scanner.state: if one call returns scanError, +// every subsequent call will return scanError too. +const ( + // Continue. 
+ scanContinue = iota // uninteresting byte + scanBeginLiteral // end implied by next result != scanContinue + scanBeginObject // begin object + scanObjectKey // just finished object key (string) + scanObjectValue // just finished non-last object value + scanEndObject // end object (implies scanObjectValue if possible) + scanBeginArray // begin array + scanArrayValue // just finished array value + scanEndArray // end array (implies scanArrayValue if possible) + scanSkipSpace // space byte; can skip; known to be last "continue" result + + // Stop. + scanEnd // top-level value ended *before* this byte; known to be first "stop" result + scanError // hit an error, scanner.err. +) + +// These values are stored in the parseState stack. +// They give the current state of a composite value +// being scanned. If the parser is inside a nested value +// the parseState describes the nested state, outermost at entry 0. +const ( + parseObjectKey = iota // parsing object key (before colon) + parseObjectValue // parsing object value (after colon) + parseArrayValue // parsing array value +) + +// This limits the max nesting depth to prevent stack overflow. +// This is permitted by https://tools.ietf.org/html/rfc7159#section-9 +const maxNestingDepth = 10000 + +// reset prepares the scanner for use. +// It must be called before calling s.step. +func (s *scanner) reset() { + s.step = stateBeginValue + s.parseState = s.parseState[0:0] + s.err = nil + s.endTop = false +} + +// eof tells the scanner that the end of input has been reached. +// It returns a scan status just as s.step does. +func (s *scanner) eof() int { + if s.err != nil { + return scanError + } + if s.endTop { + return scanEnd + } + s.step(s, ' ') + if s.endTop { + return scanEnd + } + if s.err == nil { + s.err = &SyntaxError{"unexpected end of JSON input", s.bytes} + } + return scanError +} + +// pushParseState pushes a new parse state p onto the parse stack. +// an error state is returned if maxNestingDepth was exceeded, otherwise successState is returned. +func (s *scanner) pushParseState(c byte, newParseState int, successState int) int { + s.parseState = append(s.parseState, newParseState) + if len(s.parseState) <= maxNestingDepth { + return successState + } + return s.error(c, "exceeded max depth") +} + +// popParseState pops a parse state (already obtained) off the stack +// and updates s.step accordingly. +func (s *scanner) popParseState() { + n := len(s.parseState) - 1 + s.parseState = s.parseState[0:n] + if n == 0 { + s.step = stateEndTop + s.endTop = true + } else { + s.step = stateEndValue + } +} + +func isSpace(c byte) bool { + return c <= ' ' && (c == ' ' || c == '\t' || c == '\r' || c == '\n') +} + +// stateBeginValueOrEmpty is the state after reading `[`. +func stateBeginValueOrEmpty(s *scanner, c byte) int { + if isSpace(c) { + return scanSkipSpace + } + if c == ']' { + return stateEndValue(s, c) + } + return stateBeginValue(s, c) +} + +// stateBeginValue is the state at the beginning of the input. 
+func stateBeginValue(s *scanner, c byte) int { + if isSpace(c) { + return scanSkipSpace + } + switch c { + case '{': + s.step = stateBeginStringOrEmpty + return s.pushParseState(c, parseObjectKey, scanBeginObject) + case '[': + s.step = stateBeginValueOrEmpty + return s.pushParseState(c, parseArrayValue, scanBeginArray) + case '"': + s.step = stateInString + return scanBeginLiteral + case '-': + s.step = stateNeg + return scanBeginLiteral + case '0': // beginning of 0.123 + s.step = state0 + return scanBeginLiteral + case 't': // beginning of true + s.step = stateT + return scanBeginLiteral + case 'f': // beginning of false + s.step = stateF + return scanBeginLiteral + case 'n': // beginning of null + s.step = stateN + return scanBeginLiteral + } + if '1' <= c && c <= '9' { // beginning of 1234.5 + s.step = state1 + return scanBeginLiteral + } + return s.error(c, "looking for beginning of value") +} + +// stateBeginStringOrEmpty is the state after reading `{`. +func stateBeginStringOrEmpty(s *scanner, c byte) int { + if isSpace(c) { + return scanSkipSpace + } + if c == '}' { + n := len(s.parseState) + s.parseState[n-1] = parseObjectValue + return stateEndValue(s, c) + } + return stateBeginString(s, c) +} + +// stateBeginString is the state after reading `{"key": value,`. +func stateBeginString(s *scanner, c byte) int { + if isSpace(c) { + return scanSkipSpace + } + if c == '"' { + s.step = stateInString + return scanBeginLiteral + } + return s.error(c, "looking for beginning of object key string") +} + +// stateEndValue is the state after completing a value, +// such as after reading `{}` or `true` or `["x"`. +func stateEndValue(s *scanner, c byte) int { + n := len(s.parseState) + if n == 0 { + // Completed top-level before the current byte. + s.step = stateEndTop + s.endTop = true + return stateEndTop(s, c) + } + if isSpace(c) { + s.step = stateEndValue + return scanSkipSpace + } + ps := s.parseState[n-1] + switch ps { + case parseObjectKey: + if c == ':' { + s.parseState[n-1] = parseObjectValue + s.step = stateBeginValue + return scanObjectKey + } + return s.error(c, "after object key") + case parseObjectValue: + if c == ',' { + s.parseState[n-1] = parseObjectKey + s.step = stateBeginString + return scanObjectValue + } + if c == '}' { + s.popParseState() + return scanEndObject + } + return s.error(c, "after object key:value pair") + case parseArrayValue: + if c == ',' { + s.step = stateBeginValue + return scanArrayValue + } + if c == ']' { + s.popParseState() + return scanEndArray + } + return s.error(c, "after array element") + } + return s.error(c, "") +} + +// stateEndTop is the state after finishing the top-level value, +// such as after reading `{}` or `[1,2,3]`. +// Only space characters should be seen now. +func stateEndTop(s *scanner, c byte) int { + if !isSpace(c) { + // Complain about non-space byte on next call. + s.error(c, "after top-level value") + } + return scanEnd +} + +// stateInString is the state after reading `"`. +func stateInString(s *scanner, c byte) int { + if c == '"' { + s.step = stateEndValue + return scanContinue + } + if c == '\\' { + s.step = stateInStringEsc + return scanContinue + } + if c < 0x20 { + return s.error(c, "in string literal") + } + return scanContinue +} + +// stateInStringEsc is the state after reading `"\` during a quoted string. 
+func stateInStringEsc(s *scanner, c byte) int { + switch c { + case 'b', 'f', 'n', 'r', 't', '\\', '/', '"': + s.step = stateInString + return scanContinue + case 'u': + s.step = stateInStringEscU + return scanContinue + } + return s.error(c, "in string escape code") +} + +// stateInStringEscU is the state after reading `"\u` during a quoted string. +func stateInStringEscU(s *scanner, c byte) int { + if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' { + s.step = stateInStringEscU1 + return scanContinue + } + // numbers + return s.error(c, "in \\u hexadecimal character escape") +} + +// stateInStringEscU1 is the state after reading `"\u1` during a quoted string. +func stateInStringEscU1(s *scanner, c byte) int { + if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' { + s.step = stateInStringEscU12 + return scanContinue + } + // numbers + return s.error(c, "in \\u hexadecimal character escape") +} + +// stateInStringEscU12 is the state after reading `"\u12` during a quoted string. +func stateInStringEscU12(s *scanner, c byte) int { + if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' { + s.step = stateInStringEscU123 + return scanContinue + } + // numbers + return s.error(c, "in \\u hexadecimal character escape") +} + +// stateInStringEscU123 is the state after reading `"\u123` during a quoted string. +func stateInStringEscU123(s *scanner, c byte) int { + if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' { + s.step = stateInString + return scanContinue + } + // numbers + return s.error(c, "in \\u hexadecimal character escape") +} + +// stateNeg is the state after reading `-` during a number. +func stateNeg(s *scanner, c byte) int { + if c == '0' { + s.step = state0 + return scanContinue + } + if '1' <= c && c <= '9' { + s.step = state1 + return scanContinue + } + return s.error(c, "in numeric literal") +} + +// state1 is the state after reading a non-zero integer during a number, +// such as after reading `1` or `100` but not `0`. +func state1(s *scanner, c byte) int { + if '0' <= c && c <= '9' { + s.step = state1 + return scanContinue + } + return state0(s, c) +} + +// state0 is the state after reading `0` during a number. +func state0(s *scanner, c byte) int { + if c == '.' { + s.step = stateDot + return scanContinue + } + if c == 'e' || c == 'E' { + s.step = stateE + return scanContinue + } + return stateEndValue(s, c) +} + +// stateDot is the state after reading the integer and decimal point in a number, +// such as after reading `1.`. +func stateDot(s *scanner, c byte) int { + if '0' <= c && c <= '9' { + s.step = stateDot0 + return scanContinue + } + return s.error(c, "after decimal point in numeric literal") +} + +// stateDot0 is the state after reading the integer, decimal point, and subsequent +// digits of a number, such as after reading `3.14`. +func stateDot0(s *scanner, c byte) int { + if '0' <= c && c <= '9' { + return scanContinue + } + if c == 'e' || c == 'E' { + s.step = stateE + return scanContinue + } + return stateEndValue(s, c) +} + +// stateE is the state after reading the mantissa and e in a number, +// such as after reading `314e` or `0.314e`. +func stateE(s *scanner, c byte) int { + if c == '+' || c == '-' { + s.step = stateESign + return scanContinue + } + return stateESign(s, c) +} + +// stateESign is the state after reading the mantissa, e, and sign in a number, +// such as after reading `314e-` or `0.314e+`. 
+func stateESign(s *scanner, c byte) int { + if '0' <= c && c <= '9' { + s.step = stateE0 + return scanContinue + } + return s.error(c, "in exponent of numeric literal") +} + +// stateE0 is the state after reading the mantissa, e, optional sign, +// and at least one digit of the exponent in a number, +// such as after reading `314e-2` or `0.314e+1` or `3.14e0`. +func stateE0(s *scanner, c byte) int { + if '0' <= c && c <= '9' { + return scanContinue + } + return stateEndValue(s, c) +} + +// stateT is the state after reading `t`. +func stateT(s *scanner, c byte) int { + if c == 'r' { + s.step = stateTr + return scanContinue + } + return s.error(c, "in literal true (expecting 'r')") +} + +// stateTr is the state after reading `tr`. +func stateTr(s *scanner, c byte) int { + if c == 'u' { + s.step = stateTru + return scanContinue + } + return s.error(c, "in literal true (expecting 'u')") +} + +// stateTru is the state after reading `tru`. +func stateTru(s *scanner, c byte) int { + if c == 'e' { + s.step = stateEndValue + return scanContinue + } + return s.error(c, "in literal true (expecting 'e')") +} + +// stateF is the state after reading `f`. +func stateF(s *scanner, c byte) int { + if c == 'a' { + s.step = stateFa + return scanContinue + } + return s.error(c, "in literal false (expecting 'a')") +} + +// stateFa is the state after reading `fa`. +func stateFa(s *scanner, c byte) int { + if c == 'l' { + s.step = stateFal + return scanContinue + } + return s.error(c, "in literal false (expecting 'l')") +} + +// stateFal is the state after reading `fal`. +func stateFal(s *scanner, c byte) int { + if c == 's' { + s.step = stateFals + return scanContinue + } + return s.error(c, "in literal false (expecting 's')") +} + +// stateFals is the state after reading `fals`. +func stateFals(s *scanner, c byte) int { + if c == 'e' { + s.step = stateEndValue + return scanContinue + } + return s.error(c, "in literal false (expecting 'e')") +} + +// stateN is the state after reading `n`. +func stateN(s *scanner, c byte) int { + if c == 'u' { + s.step = stateNu + return scanContinue + } + return s.error(c, "in literal null (expecting 'u')") +} + +// stateNu is the state after reading `nu`. +func stateNu(s *scanner, c byte) int { + if c == 'l' { + s.step = stateNul + return scanContinue + } + return s.error(c, "in literal null (expecting 'l')") +} + +// stateNul is the state after reading `nul`. +func stateNul(s *scanner, c byte) int { + if c == 'l' { + s.step = stateEndValue + return scanContinue + } + return s.error(c, "in literal null (expecting 'l')") +} + +// stateError is the state after reaching a syntax error, +// such as after reading `[1}` or `5.1.2`. +func stateError(s *scanner, c byte) int { + return scanError +} + +// error records an error and switches to the error state. +func (s *scanner) error(c byte, context string) int { + s.step = stateError + s.err = &SyntaxError{"invalid character " + quoteChar(c) + " " + context, s.bytes} + return scanError +} + +// quoteChar formats c as a quoted character literal. +func quoteChar(c byte) string { + // special cases - different from quoted strings + if c == '\'' { + return `'\''` + } + if c == '"' { + return `'"'` + } + + // use quoted string with different quotation marks + s := strconv.Quote(string(c)) + return "'" + s[1:len(s)-1] + "'" +} + +// Marshal returns the JSON encoding of v. +// +// Marshal traverses the value v recursively. 
+// If an encountered value implements [Marshaler] +// and is not a nil pointer, Marshal calls [Marshaler.MarshalJSON] +// to produce JSON. If no [Marshaler.MarshalJSON] method is present but the +// value implements [encoding.TextMarshaler] instead, Marshal calls +// [encoding.TextMarshaler.MarshalText] and encodes the result as a JSON string. +// The nil pointer exception is not strictly necessary +// but mimics a similar, necessary exception in the behavior of +// [Unmarshaler.UnmarshalJSON]. +// +// Otherwise, Marshal uses the following type-dependent default encodings: +// +// Boolean values encode as JSON booleans. +// +// Floating point, integer, and [Number] values encode as JSON numbers. +// NaN and +/-Inf values will return an [UnsupportedValueError]. +// +// String values encode as JSON strings coerced to valid UTF-8, +// replacing invalid bytes with the Unicode replacement rune. +// So that the JSON will be safe to embed inside HTML <script> tags, the string +// is encoded using HTMLEscape, which replaces "<", ">", "&", U+2028, and U+2029 +// with "\u003c", "\u003e", "\u0026", "\u2028", and "\u2029".
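// Example: a minimal sketch of the encoding side documented above. Marshal's
// declaration is not visible in this hunk, so the sketch assumes it matches
// the doc comment, i.e. func Marshal(v any) ([]byte, error); Valid is defined
// earlier in this file. examplePayload and exampleMarshalUsage are
// hypothetical illustrations, not part of this package.
type examplePayload struct {
	Name    string `json:"name"`
	Clicked bool   `json:"clicked"`
	Rate    float64
}

func exampleMarshalUsage() {
	out, err := Marshal(examplePayload{Name: "a<b>&c", Clicked: true, Rate: 2.5})
	if err != nil {
		fmt.Println("marshal failed:", err)
		return
	}
	// "<", ">" and "&" are escaped, so the output is safe to embed in HTML:
	// {"name":"a\u003cb\u003e\u0026c","clicked":true,"Rate":2.5}
	fmt.Println(string(out))

	// Valid reports whether bytes are well-formed JSON without decoding them.
	fmt.Println(Valid(out)) // true
}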
diff --git a/frontend/src/lib/components/AutoRefresh.svelte b/frontend/src/lib/components/AutoRefresh.svelte new file mode 100644 index 0000000..50de60f --- /dev/null +++ b/frontend/src/lib/components/AutoRefresh.svelte @@ -0,0 +1,80 @@ + + +
+ Auto-Refresh +
diff --git a/frontend/src/lib/components/BigButton.svelte b/frontend/src/lib/components/BigButton.svelte new file mode 100644 index 0000000..505a5e1 --- /dev/null +++ b/frontend/src/lib/components/BigButton.svelte @@ -0,0 +1,16 @@ + + + diff --git a/frontend/src/lib/components/Button.svelte b/frontend/src/lib/components/Button.svelte new file mode 100644 index 0000000..086cab8 --- /dev/null +++ b/frontend/src/lib/components/Button.svelte @@ -0,0 +1,18 @@ + + + diff --git a/frontend/src/lib/components/CTAbutton.svelte b/frontend/src/lib/components/CTAbutton.svelte new file mode 100644 index 0000000..cec39c8 --- /dev/null +++ b/frontend/src/lib/components/CTAbutton.svelte @@ -0,0 +1,17 @@ + + +
diff --git a/frontend/src/lib/components/CampaignCalendar.svelte b/frontend/src/lib/components/CampaignCalendar.svelte new file mode 100644 index 0000000..0f0d395 --- /dev/null +++ b/frontend/src/lib/components/CampaignCalendar.svelte @@ -0,0 +1,514 @@ + + +
+
+ +
+ + +

+ {format(currentMonth, 'MMMM yyyy')} +

+ + +
+ + +
+ {#each [{ key: 'SCHEDULED', color: COLORS.SCHEDULED, label: 'Scheduled' }, { key: 'ACTIVE', color: COLORS.ACTIVE, label: 'Active' }, { key: 'COMPLETED', color: COLORS.COMPLETED, label: 'Completed' }, { key: 'SELF_MANAGED', color: COLORS.SELF_MANAGED, label: 'Self-managed' }] as item} + + {/each} +
+ + +
+ +
+ {#each ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'] as day} +
{day}
+ {/each} +
+ + + {#if !isInitialized || isGeneratingCalendar || isLoadingNewMonth} +
+
+
+ Loading calendar... +
+
+ {:else} +
+ {#each weeks as week, weekIndex} +
+ {#each week as day} +
+ +
+ {day.date.getDate()} +
+ +
+ {#each day.campaigns as campaign} + +
+ {truncateText(campaign.name, 18)} +
+
+ {/each} +
+
+ {/each} +
+ {/each} +
+ {/if} +
+
+
+ + diff --git a/frontend/src/lib/components/CampaignTrendChart.svelte b/frontend/src/lib/components/CampaignTrendChart.svelte new file mode 100644 index 0000000..138723a --- /dev/null +++ b/frontend/src/lib/components/CampaignTrendChart.svelte @@ -0,0 +1,1029 @@ + + +
+ + + {#if !containerReady} +
+ Preparing chart… +
+ {:else} +
+ {#if debouncedIsLoading} +
+
+ Loading trend data... +
+ {:else if !hasAttemptedLoad && debouncedShowPending} +
+ Preparing trend data... +
+ {:else if hasAttemptedLoad && !isLoading && !debouncedIsLoading && chartData.length === 0} +
+
+ + + +

No campaign data

+

+ Campaign statistics will appear here once campaigns are completed. +

+
+
+ {:else if hasAttemptedLoad && !isLoading && !debouncedIsLoading && chartData.length === 1} +
+
+ + + +
+

Single Campaign Data

+

+ Trends will appear when you have 2 or more completed campaigns. +

+
+
+
+
+
{chartData[0].openRate}%
+
Open Rate
+
+
+
{chartData[0].clickRate}%
+
Click Rate
+
+
+
{chartData[0].submissionRate}%
+
Submission Rate
+
+
+
+ {:else if hasAttemptedLoad && !isLoading && !debouncedIsLoading && chartData.length >= 2} +
+ +
+

+ Trendline: Last {trendStats ? trendStats.n : chartData.length} Campaigns (average) +

+
+ + {#if chartData.length > 1} + + {/if} + +
+
+
+ {#if chartData.length > 0} +
+ {#each metrics as metric} +
+
+
+ {metric.label} +
+
+ + {trendStats && + typeof trendStats[metric.key] === 'number' && + !isNaN(trendStats[metric.key]) + ? trendStats[metric.key].toFixed(1) + '%' + : '—'} + +
+
+ {/each} +
+ {:else} +
+ No trendline stats to display (trendStats is null or not enough data). +
+ {/if} + {#key chartKey} + {#if containerReady} +
+ {/if} + {/key} + +
+ {/if} +
+ {/if} +
+ + diff --git a/frontend/src/lib/components/CheckboxField.svelte b/frontend/src/lib/components/CheckboxField.svelte new file mode 100644 index 0000000..21ac732 --- /dev/null +++ b/frontend/src/lib/components/CheckboxField.svelte @@ -0,0 +1,88 @@ + + + diff --git a/frontend/src/lib/components/ConfirmPrompt.svelte b/frontend/src/lib/components/ConfirmPrompt.svelte new file mode 100644 index 0000000..49d3c95 --- /dev/null +++ b/frontend/src/lib/components/ConfirmPrompt.svelte @@ -0,0 +1,198 @@ + + +{#if visible} +
+ +{/if} diff --git a/frontend/src/lib/components/DateField.svelte b/frontend/src/lib/components/DateField.svelte new file mode 100644 index 0000000..2510ba2 --- /dev/null +++ b/frontend/src/lib/components/DateField.svelte @@ -0,0 +1,97 @@ + + +{#if !noLabel} + +{/if} +
+ +
diff --git a/frontend/src/lib/components/DateTimeField.svelte b/frontend/src/lib/components/DateTimeField.svelte new file mode 100644 index 0000000..863e0f9 --- /dev/null +++ b/frontend/src/lib/components/DateTimeField.svelte @@ -0,0 +1,159 @@ + + + +
+ + +
diff --git a/frontend/src/lib/components/Datetime.svelte b/frontend/src/lib/components/Datetime.svelte new file mode 100644 index 0000000..d5ef0ba --- /dev/null +++ b/frontend/src/lib/components/Datetime.svelte @@ -0,0 +1,25 @@ + + +
+ {#if date} + {#if !hideHours} + {date.toLocaleString()} + {:else} + {date.toLocaleDateString()} + {/if} + {/if} +
diff --git a/frontend/src/lib/components/DeveloperPanel.svelte b/frontend/src/lib/components/DeveloperPanel.svelte new file mode 100644 index 0000000..20c479e --- /dev/null +++ b/frontend/src/lib/components/DeveloperPanel.svelte @@ -0,0 +1,102 @@ + + +{#if import.meta.env.DEV} +
+
+ +
+ {#if visible} +
+

Developer Panel

+

Links

+ +

Toast

+
    +
  • + +
  • +
  • + +
  • +
  • + +
  • +
  • + +
  • +
+
+

Global State

+ + {#each Object.entries(state) as [key, value]} + + + + + {/each} +
{key} + {#if typeof value === 'object'} +
{JSON.stringify(value, null, 2)}
+ {:else} +

+ {value} +

+ {/if} +
+
+
+ {/if} +
+{/if} diff --git a/frontend/src/lib/components/EventTimeline.svelte b/frontend/src/lib/components/EventTimeline.svelte new file mode 100644 index 0000000..c1a948b --- /dev/null +++ b/frontend/src/lib/components/EventTimeline.svelte @@ -0,0 +1,529 @@ + + +
+
+ + +
+ + diff --git a/frontend/src/lib/components/FileField.svelte b/frontend/src/lib/components/FileField.svelte new file mode 100644 index 0000000..368979f --- /dev/null +++ b/frontend/src/lib/components/FileField.svelte @@ -0,0 +1,73 @@ + + + diff --git a/frontend/src/lib/components/Form.svelte b/frontend/src/lib/components/Form.svelte new file mode 100644 index 0000000..fd59a8f --- /dev/null +++ b/frontend/src/lib/components/Form.svelte @@ -0,0 +1,15 @@ + + +
+ + diff --git a/frontend/src/lib/components/FormButton.svelte b/frontend/src/lib/components/FormButton.svelte new file mode 100644 index 0000000..3413c04 --- /dev/null +++ b/frontend/src/lib/components/FormButton.svelte @@ -0,0 +1,22 @@ + + + diff --git a/frontend/src/lib/components/FormColumn.svelte b/frontend/src/lib/components/FormColumn.svelte new file mode 100644 index 0000000..41fe4d4 --- /dev/null +++ b/frontend/src/lib/components/FormColumn.svelte @@ -0,0 +1,10 @@ + + +
+ +
diff --git a/frontend/src/lib/components/FormColumns.svelte b/frontend/src/lib/components/FormColumns.svelte new file mode 100644 index 0000000..f87d660 --- /dev/null +++ b/frontend/src/lib/components/FormColumns.svelte @@ -0,0 +1,10 @@ + + +
+ +
diff --git a/frontend/src/lib/components/FormError.svelte b/frontend/src/lib/components/FormError.svelte new file mode 100644 index 0000000..d96b23f --- /dev/null +++ b/frontend/src/lib/components/FormError.svelte @@ -0,0 +1,42 @@ + + +{#if message} +
+
+
+ + + + + + + +

+ {message} +

+
+
+
+{/if} diff --git a/frontend/src/lib/components/FormFlex.svelte b/frontend/src/lib/components/FormFlex.svelte new file mode 100644 index 0000000..71dded4 --- /dev/null +++ b/frontend/src/lib/components/FormFlex.svelte @@ -0,0 +1,11 @@ + + +
+ + diff --git a/frontend/src/lib/components/FormFooter.svelte b/frontend/src/lib/components/FormFooter.svelte new file mode 100644 index 0000000..099222b --- /dev/null +++ b/frontend/src/lib/components/FormFooter.svelte @@ -0,0 +1,31 @@ + + +
+ + {okText} +
diff --git a/frontend/src/lib/components/FormGrid.svelte b/frontend/src/lib/components/FormGrid.svelte new file mode 100644 index 0000000..7f6abf6 --- /dev/null +++ b/frontend/src/lib/components/FormGrid.svelte @@ -0,0 +1,16 @@ + + +
+ + diff --git a/frontend/src/lib/components/GhostText.svelte b/frontend/src/lib/components/GhostText.svelte new file mode 100644 index 0000000..6863ffa --- /dev/null +++ b/frontend/src/lib/components/GhostText.svelte @@ -0,0 +1,21 @@ + + +{#if center} +
+
 
+
+{:else} +
 
+{/if} diff --git a/frontend/src/lib/components/HeadTitle.svelte b/frontend/src/lib/components/HeadTitle.svelte new file mode 100644 index 0000000..c4ef384 --- /dev/null +++ b/frontend/src/lib/components/HeadTitle.svelte @@ -0,0 +1,7 @@ + + + + {title ?? ''} - Phishing Club + diff --git a/frontend/src/lib/components/HeaderFirst.svelte b/frontend/src/lib/components/HeaderFirst.svelte new file mode 100644 index 0000000..5abc0b4 --- /dev/null +++ b/frontend/src/lib/components/HeaderFirst.svelte @@ -0,0 +1,3 @@ +
+  logo
\ No newline at end of file
diff --git a/frontend/src/lib/components/Headline.svelte b/frontend/src/lib/components/Headline.svelte new file mode 100644 index 0000000..15f54ef --- /dev/null +++ b/frontend/src/lib/components/Headline.svelte @@ -0,0 +1,3 @@
diff --git a/frontend/src/lib/components/Hello.svelte b/frontend/src/lib/components/Hello.svelte new file mode 100644 index 0000000..5d7a5c8 --- /dev/null +++ b/frontend/src/lib/components/Hello.svelte @@ -0,0 +1,8 @@
+  Hello {name}!
diff --git a/frontend/src/lib/components/Input.svelte b/frontend/src/lib/components/Input.svelte new file mode 100644 index 0000000..586cd82 --- /dev/null +++ b/frontend/src/lib/components/Input.svelte @@ -0,0 +1,37 @@
+  {
+    const t = /** @type {HTMLInputElement} */ (event.target);
+    value = t.value;
+    // If submitOnEnter is true and Enter was pressed, submit the closest form
+    if (submitOnEnter && event.key === 'Enter') {
+      const form = /** @type {HTMLElement} */ (event.target).closest('form');
+      if (form) {
+        form.requestSubmit();
+      }
+    }
+  }}
+  {value}
+  required
+  autocomplete="off"
+  {type}
+  id={fieldName}
+  name={fieldName}
+  class="w-full p-2 rounded bg-pc-lightblue focus:outline-none focus:ring-0 focus:border-cta-blue focus:border-2"
+/>
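Only the Input.svelte keyup handler and attribute list are shown above; the surrounding markup is not. A minimal sketch of the pattern that handler implies — an exported value kept in sync on keyup, with optional form submission on Enter — might look like the following. The label markup and the on:keyup directive name are assumptions, not the component's actual source.

<script>
  // Sketch only: props inferred from the surviving attribute list above.
  export let fieldName = '';
  export let type = 'text';
  export let value = '';
  export let submitOnEnter = false;
</script>

<label for={fieldName} class="block pb-1">{fieldName}</label>
<input
  {type}
  {value}
  id={fieldName}
  name={fieldName}
  autocomplete="off"
  required
  on:keyup={(event) => {
    const t = /** @type {HTMLInputElement} */ (event.target);
    // Keep the exported prop in sync with what the user typed
    value = t.value;
    // Submit the closest form when Enter is pressed and submitOnEnter is set
    if (submitOnEnter && event.key === 'Enter') {
      const form = t.closest('form');
      if (form) {
        form.requestSubmit();
      }
    }
  }}
  class="w-full p-2 rounded bg-pc-lightblue focus:outline-none focus:ring-0 focus:border-cta-blue focus:border-2"
/>

A parent could then use it as <Input fieldName="email" bind:value={email} submitOnEnter /> inside a form, though the exact prop surface of the real component may differ.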
diff --git a/frontend/src/lib/components/Loader.svelte b/frontend/src/lib/components/Loader.svelte new file mode 100644 index 0000000..cbc4de0 --- /dev/null +++ b/frontend/src/lib/components/Loader.svelte @@ -0,0 +1,27 @@
+{#if $isLoading && isAnimating}
+{/if}
diff --git a/frontend/src/lib/components/Modal.svelte b/frontend/src/lib/components/Modal.svelte new file mode 100644 index 0000000..83c7ea7 --- /dev/null +++ b/frontend/src/lib/components/Modal.svelte @@ -0,0 +1,281 @@
+{#if visible}
+{/if}
diff --git a/frontend/src/lib/components/Pagination.svelte b/frontend/src/lib/components/Pagination.svelte new file mode 100644 index 0000000..76b6227 --- /dev/null +++ b/frontend/src/lib/components/Pagination.svelte @@ -0,0 +1,54 @@
{currentPage}
+ +
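Pagination.svelte survives here only as the {currentPage} display. A sketch of a typical previous/next pager around that value follows; the event names, the hasNextPage prop, and the disabled logic are assumptions.

<script>
  import { createEventDispatcher } from 'svelte';

  // Sketch only: prop and event names are assumptions.
  export let currentPage = 1;
  export let hasNextPage = true;

  const dispatch = createEventDispatcher();
</script>

<div class="flex items-center gap-2">
  <!-- Go one page back; disabled on the first page -->
  <button type="button" disabled={currentPage <= 1} on:click={() => dispatch('previous')}>
    Previous
  </button>

  <span>{currentPage}</span>

  <!-- Go one page forward; disabled when there are no further pages -->
  <button type="button" disabled={!hasNextPage} on:click={() => dispatch('next')}>
    Next
  </button>
</div>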
diff --git a/frontend/src/lib/components/PasswordField.svelte b/frontend/src/lib/components/PasswordField.svelte new file mode 100644 index 0000000..41ae191 --- /dev/null +++ b/frontend/src/lib/components/PasswordField.svelte @@ -0,0 +1,118 @@
diff --git a/frontend/src/lib/components/RelativeTime.svelte b/frontend/src/lib/components/RelativeTime.svelte new file mode 100644 index 0000000..031a778 --- /dev/null +++ b/frontend/src/lib/components/RelativeTime.svelte @@ -0,0 +1,93 @@
+  {formattedTime}
diff --git a/frontend/src/lib/components/RootLoader.svelte b/frontend/src/lib/components/RootLoader.svelte new file mode 100644 index 0000000..1627865 --- /dev/null +++ b/frontend/src/lib/components/RootLoader.svelte @@ -0,0 +1,33 @@
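RelativeTime.svelte above is reduced to its {formattedTime} output. A sketch of a relative-time formatter that keeps such a label fresh is shown below; the formatting thresholds and the 30-second refresh interval are assumptions.

<script>
  import { onMount } from 'svelte';

  // Sketch only: formatting rules and the refresh cadence are assumptions.
  export let date = new Date();

  let formattedTime = '';

  const format = () => {
    const seconds = Math.round((Date.now() - new Date(date).getTime()) / 1000);
    if (seconds < 60) {
      formattedTime = 'just now';
    } else if (seconds < 3600) {
      formattedTime = `${Math.floor(seconds / 60)} min ago`;
    } else if (seconds < 86400) {
      formattedTime = `${Math.floor(seconds / 3600)} h ago`;
    } else {
      formattedTime = new Date(date).toLocaleDateString();
    }
  };

  onMount(() => {
    format();
    // Re-render the label periodically so it does not go stale
    const timer = setInterval(format, 30000);
    return () => clearInterval(timer);
  });
</script>

<span title={new Date(date).toLocaleString()}>
  {formattedTime}
</span>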
diff --git a/frontend/src/lib/components/Search.svelte b/frontend/src/lib/components/Search.svelte new file mode 100644 index 0000000..0cad41a --- /dev/null +++ b/frontend/src/lib/components/Search.svelte @@ -0,0 +1,45 @@
+  search icon
+  {
+    if (pagination && pagination.search !== null) {
+      setSearch();
+    }
+  }}
+  class="bg-grayblue-light w-56 border text-gray-600 border-gray-300 pl-8 py-2 relative rounded-lg focus:outline-none focus:ring-0 focus:border-cta-blue focus:border"
+  placeholder="Search"
+/>
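Only the Search.svelte input attributes and its keyup guard are visible above. A sketch of the component they suggest — a text box that pushes its value into a pagination object and triggers a search — follows. The pagination API (a writable search field plus a fetch method), the setSearch body, and the icon asset path are assumptions.

<script>
  // Sketch only: the pagination object is assumed to expose a `search`
  // field and a `fetch()` method; adjust to the real store API.
  export let pagination = null;

  let query = '';

  const setSearch = () => {
    if (!pagination) return;
    pagination.search = query;
    pagination.fetch();
  };
</script>

<div class="relative">
  <img src="/search.svg" alt="search icon" class="absolute left-2 top-3 w-4 h-4" />
  <input
    type="text"
    bind:value={query}
    placeholder="Search"
    on:keyup={() => {
      // Only search once the pagination object has a search slot
      if (pagination && pagination.search !== null) {
        setSearch();
      }
    }}
    class="bg-grayblue-light w-56 border text-gray-600 border-gray-300 pl-8 py-2 relative rounded-lg focus:outline-none focus:ring-0 focus:border-cta-blue focus:border"
  />
</div>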
diff --git a/frontend/src/lib/components/Select.svelte b/frontend/src/lib/components/Select.svelte new file mode 100644 index 0000000..e241045 --- /dev/null +++ b/frontend/src/lib/components/Select.svelte @@ -0,0 +1,45 @@
diff --git a/frontend/src/lib/components/SelectSquare.svelte b/frontend/src/lib/components/SelectSquare.svelte new file mode 100644 index 0000000..d662f97 --- /dev/null +++ b/frontend/src/lib/components/SelectSquare.svelte @@ -0,0 +1,67 @@
+{#if label}
+  {label}
+  {#if toolTipText.length > 0}
+    {toolTipText}
+  {/if}
+  {#if optional}
+    optional
+  {/if}
+{/if}
+{#each options as option}
+{/each}
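The SelectSquare.svelte fragment above keeps its label, tooltip and optional scaffolding plus an {#each options} loop, but none of the option markup. A sketch of one way to render such an option group follows; the selection handling and button styling are assumptions.

<script>
  // Sketch only: value handling is an assumption.
  export let label = '';
  export let toolTipText = '';
  export let optional = false;
  export let options = [];
  export let value = '';
</script>

{#if label}
  <div class="flex items-center gap-2 pb-1">
    <span>{label}</span>
    {#if toolTipText.length > 0}
      <span title={toolTipText}>?</span>
    {/if}
    {#if optional}
      <span class="text-xs text-gray-400">optional</span>
    {/if}
  </div>
{/if}

<div class="flex gap-2">
  {#each options as option}
    <!-- Each option is a small square button; the active one is highlighted -->
    <button
      type="button"
      class={value === option ? 'border-2 border-cta-blue p-2' : 'border p-2'}
      on:click={() => (value = option)}
    >
      {option}
    </button>
  {/each}
</div>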
diff --git a/frontend/src/lib/components/StatsCard.svelte b/frontend/src/lib/components/StatsCard.svelte new file mode 100644 index 0000000..8fad740 --- /dev/null +++ b/frontend/src/lib/components/StatsCard.svelte @@ -0,0 +1,125 @@
+  {title}
+  {Math.floor($displayValue)}
+  {#if validPercentages.length > 0}
+  {/if}
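StatsCard.svelte keeps only its title, an animated {Math.floor($displayValue)} counter and a validPercentages guard. A sketch of the tweened-store counter that display implies follows; the tween settings, the percentage list shape and the card styling are assumptions.

<script>
  import { tweened } from 'svelte/motion';
  import { cubicOut } from 'svelte/easing';

  // Sketch only: duration/easing and the percentage props are assumptions.
  export let title = '';
  export let value = 0;
  export let percentages = [];

  // Animate from the previous number to the new one instead of jumping
  const displayValue = tweened(0, { duration: 600, easing: cubicOut });
  $: displayValue.set(value);

  // Ignore entries that cannot be shown as a percentage
  $: validPercentages = percentages.filter(
    (p) => typeof p === 'number' && !Number.isNaN(p)
  );
</script>

<div class="rounded bg-white p-4 shadow">
  <div class="text-sm text-gray-500">{title}</div>
  <div class="text-3xl font-semibold">{Math.floor($displayValue)}</div>

  {#if validPercentages.length > 0}
    <div class="flex gap-2 pt-2 text-xs text-gray-500">
      {#each validPercentages as p}
        <span>{p.toFixed(1)}%</span>
      {/each}
    </div>
  {/if}
</div>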
diff --git a/frontend/src/lib/components/SubHeadline.svelte b/frontend/src/lib/components/SubHeadline.svelte new file mode 100644 index 0000000..a65b988 --- /dev/null +++ b/frontend/src/lib/components/SubHeadline.svelte @@ -0,0 +1,3 @@
diff --git a/frontend/src/lib/components/TestLabel.svelte b/frontend/src/lib/components/TestLabel.svelte new file mode 100644 index 0000000..f08b876 --- /dev/null +++ b/frontend/src/lib/components/TestLabel.svelte @@ -0,0 +1,6 @@
+  test
diff --git a/frontend/src/lib/components/TextField.svelte b/frontend/src/lib/components/TextField.svelte new file mode 100644 index 0000000..1f4b37e --- /dev/null +++ b/frontend/src/lib/components/TextField.svelte @@ -0,0 +1,102 @@
diff --git a/frontend/src/lib/components/TextFieldMultiSelect.svelte b/frontend/src/lib/components/TextFieldMultiSelect.svelte new file mode 100644 index 0000000..60b7ce6 --- /dev/null +++ b/frontend/src/lib/components/TextFieldMultiSelect.svelte @@ -0,0 +1,203 @@
+  {
+    if (!options.includes(inputValue)) {
+      inputValue = '';
+    }
+  }}
+  on:change={onChange}
+  on:keyup={onKeyUp}
+  on:click|stopPropagation={() => {}}
+  {id}
+  required={required && !value.length}
+  autocomplete="off"
+  class="w-full relative rounded-md py-2 pl-4 focus:pl-10 text-gray-600 border border-transparent focus:outline-none focus:border-solid focus:border focus:border-slate-400 focus:bg-gray-100 bg-grayblue-light font-normal cursor-pointer focus:cursor-text"
+/>
+{#if showSelection}
+  search
+{/if}
+drop down
+{#if showSelection}
+  {#if options.length}
+    {#each filteredOptions as option}
+    {/each}
+  {:else}
+    List is empty
+  {/if}
+{/if}
+{#each value as option}
+{/each}
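TextFieldMultiSelect.svelte retains its input handlers, a showSelection dropdown over filteredOptions with a "List is empty" fallback, and an {#each value} loop for the chosen entries, but no markup. A sketch of that multi-select pattern follows; the filtering rule, the add/remove helpers and the dropdown styling are assumptions.

<script>
  // Sketch only: helpers and filtering are assumptions.
  export let id = '';
  export let required = false;
  export let options = [];
  export let value = []; // currently selected options

  let inputValue = '';
  let showSelection = false;

  // Options that match the typed text and are not selected yet
  $: filteredOptions = options.filter(
    (o) => o.toLowerCase().includes(inputValue.toLowerCase()) && !value.includes(o)
  );

  const add = (option) => {
    value = [...value, option];
    inputValue = '';
    showSelection = false;
  };

  const remove = (option) => {
    value = value.filter((v) => v !== option);
  };
</script>

<div class="relative">
  <input
    {id}
    bind:value={inputValue}
    required={required && !value.length}
    autocomplete="off"
    on:focus={() => (showSelection = true)}
    on:blur={() => {
      // Mirror the surviving handler: discard free text that is not a known option
      if (!options.includes(inputValue)) {
        inputValue = '';
      }
    }}
  />

  {#if showSelection}
    <ul class="absolute z-10 w-full bg-white border rounded shadow">
      {#if options.length}
        {#each filteredOptions as option}
          <li>
            <!-- mousedown fires before blur, so the click is not lost -->
            <button type="button" on:mousedown={() => add(option)}>{option}</button>
          </li>
        {/each}
      {:else}
        <li>List is empty</li>
      {/if}
    </ul>
  {/if}

  <!-- Selected entries rendered as removable chips -->
  {#each value as option}
    <button type="button" on:click={() => remove(option)}>{option} ✕</button>
  {/each}
</div>

TextFieldSearchSelect.svelte and TextFieldSelect.svelte below follow the same dropdown pattern for a single value rather than a list.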
diff --git a/frontend/src/lib/components/TextFieldSearchSelect.svelte b/frontend/src/lib/components/TextFieldSearchSelect.svelte new file mode 100644 index 0000000..783b2d7 --- /dev/null +++ b/frontend/src/lib/components/TextFieldSearchSelect.svelte @@ -0,0 +1,132 @@
+  {
+    if (!options.includes(value)) {
+      value = '';
+    }
+  }}
+  on:keyup={_onKeyUp}
+  on:click|stopPropagation={() => {}}
+  autocomplete="off"
+  class="w-full relative rounded-md py-2 pl-4 focus:pl-10 text-gray-600 border border-transparent focus:outline-none focus:border-solid focus:border focus:border-slate-400 focus:bg-gray-100 bg-grayblue-light font-normal cursor-pointer focus:cursor-text"
+  {id}
+  {required}
+/>
+{#if showSelection}
+  search
+{/if}
+drop down
+{#if options.length && showSelection}
+  {#each options as option}
+  {/each}
+{/if}
diff --git a/frontend/src/lib/components/TextFieldSelect.svelte b/frontend/src/lib/components/TextFieldSelect.svelte new file mode 100644 index 0000000..cb566b0 --- /dev/null +++ b/frontend/src/lib/components/TextFieldSelect.svelte @@ -0,0 +1,348 @@
+{#if showDropdown}
+{/if}
+{#if optional === true && hasValue}
+{/if}
+{#if showDropdown}
+  {#if allOptions.length}
+    {#each allOptions as option, index}
+    {/each}
+  {:else}
+    No options available
+  {/if}
+{/if}
diff --git a/frontend/src/lib/components/TextareaField.svelte b/frontend/src/lib/components/TextareaField.svelte new file mode 100644 index 0000000..cc03647 --- /dev/null +++ b/frontend/src/lib/components/TextareaField.svelte @@ -0,0 +1,85 @@