Publish SysReptor community edition
This commit is contained in:
commit
4fede4590d
|
@ -0,0 +1,29 @@
|
|||
.idea/
|
||||
.vscode/
|
||||
|
||||
.git/
|
||||
.gitignore
|
||||
.gitlab-ci.yml
|
||||
.gitkeep
|
||||
|
||||
.env*
|
||||
**/.env*
|
||||
__pycache__
|
||||
**/__pycache__
|
||||
*.pyc
|
||||
**/*.pyc
|
||||
*.log
|
||||
**/*.log
|
||||
*.sql
|
||||
**/*.sql
|
||||
*.egg-info
|
||||
**/*.egg-info
|
||||
|
||||
Dockerfile
|
||||
docker-compose.yml
|
||||
.dockerignore
|
||||
|
||||
node_modules/
|
||||
**/node_modules/
|
||||
|
||||
api/data
|
|
@ -0,0 +1,5 @@
|
|||
.idea
|
||||
app.env
|
||||
**/app.env
|
||||
.DS_Store
|
||||
.scannerwork
|
|
@ -0,0 +1,92 @@
|
|||
# Changelog
|
||||
|
||||
## v0.76 - 2023-05-02
|
||||
* Release Community Edition
|
||||
* Add license checks and enforce license limits
|
||||
* Project archiving and encryption with 4-eye principle
|
||||
* Improve list editing in markdown editor
|
||||
* Add a refresh PDF button to the publish project page
|
||||
|
||||
|
||||
## v0.19 - 2023-04-11
|
||||
* Add private designs visible only to your user
|
||||
* Support Postgres with PgBouncer in LanguageTool
|
||||
* Allow storing files in S3 buckets
|
||||
* Fix backup restore failing for notifications
|
||||
|
||||
|
||||
## v0.18 - 2023-03-13
|
||||
* Allow setting emojis as custom note icons
|
||||
* Require re-authentication to enable admin permissions in user sessions
|
||||
* Test and improve backup and restore logic
|
||||
* Automatically cleanup unreferenced files and images
|
||||
* Add words to spellcheck dictionary
|
||||
* Allow removing and updating roles of imported project members
|
||||
* Fix label not shown for number fields
|
||||
|
||||
|
||||
## v0.17 - 2023-03-01
|
||||
* Use variable Open Sans font to fix footnote-call rendering ("font-variant-position: super" not applied)
|
||||
|
||||
|
||||
## v0.16 - 2023-02-23
|
||||
* Personal and per-project notes
|
||||
* Use asgi instead of wsgi to support async requests
|
||||
* Async PDF rendering and spellcheck request
|
||||
* Support Elastic APM for API and frontend monitoring
|
||||
* Fetch and display notifications to users
|
||||
* Add titles to pages in frontend
|
||||
|
||||
|
||||
## v0.15 - 2023-02-06
|
||||
* Support login via OpenID Connect
|
||||
* Support offloading PDF rendering to a pool of worker instances
|
||||
* Spellchecking and highlighting TODOs in string fields
|
||||
* Make toolbar sticky on top of finding, section and template editor
|
||||
* Separate scrollbars for side menu and main content
|
||||
* Rework PDF Viewer
|
||||
|
||||
|
||||
## v0.14 - 2023-01-03
|
||||
* Data-at-rest encryption for files and sensitive DB data
|
||||
* Use Session cookies instead of JWT tokens
|
||||
* Support two factor authentication with FIDO2, TOTP and Backup Codes
|
||||
* Add user role and permissions for system users
|
||||
* Support encrypting backups
|
||||
|
||||
|
||||
## v0.13 - 2022-12-16
|
||||
* Add logo and favicon
|
||||
* Add per-project user tags
|
||||
* UI Improvement: create finding dialog: reset template search input after closing dialog, set search query as finding title for new empty findings
|
||||
* UI Improvement: allow text selection in Markdown editor preview area
|
||||
|
||||
|
||||
## v0.12 - 2022-12-05
|
||||
* Provide some standard fonts in the docker container
|
||||
* Customize designs per project
|
||||
* Allow force changing designs of projects if the old and new design are incompatible
|
||||
* Update Chromium to fix CVE-2022-4262 (high)
|
||||
|
||||
|
||||
## v0.11 - 2022-11-25
|
||||
* Compress images to reduce storage size and PDF size
|
||||
* Manual highlighting of text in markdown code blocks
|
||||
* Add review status to sections, findings and templates
|
||||
* UI improvements: rework texts, add icons, more detailed error messages, group warnings by type in the publish page
|
||||
* Fix rendering of lists of users containing imported project users
|
||||
|
||||
|
||||
## Initial - 2022-11-16
|
||||
* Begin of changelog
|
||||
* Collaboratively write pentesting reports
|
||||
* Render reports to PDF
|
||||
* Customize report designs to your needs
|
||||
* Finding Template library
|
||||
* Export and import designs/templates/projects to share data
|
||||
* Multi Language support: English and German
|
||||
* Spell checking
|
||||
* Edit locking
|
||||
* Drag-and-drop image upload
|
||||
* PDF encryption
|
||||
* and many more features
|
|
@ -0,0 +1,147 @@
|
|||
FROM node:16-alpine@sha256:710a2c192ca426e03e4f3ec1869e5c29db855eb6969b74e6c50fd270ffccd3f1 AS pdfviewer-dev
|
||||
WORKDIR /app/packages/pdfviewer/
|
||||
COPY packages/pdfviewer/package.json packages/pdfviewer/package-lock.json /app/packages/pdfviewer//
|
||||
RUN npm install
|
||||
|
||||
FROM pdfviewer-dev AS pdfviewer
|
||||
COPY packages/pdfviewer /app/packages/pdfviewer//
|
||||
RUN npm run build
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
FROM node:16-alpine@sha256:710a2c192ca426e03e4f3ec1869e5c29db855eb6969b74e6c50fd270ffccd3f1 AS frontend-dev
|
||||
|
||||
WORKDIR /app/packages/markdown/
|
||||
COPY packages/markdown/package.json packages/markdown/package-lock.json /app/packages/markdown/
|
||||
RUN npm install
|
||||
|
||||
WORKDIR /app/frontend
|
||||
COPY frontend/package.json frontend/package-lock.json /app/frontend/
|
||||
RUN npm install
|
||||
|
||||
|
||||
FROM frontend-dev AS frontend-test
|
||||
COPY packages/markdown/ /app/packages/markdown/
|
||||
COPY frontend /app/frontend/
|
||||
COPY --from=pdfviewer /app/packages/pdfviewer/dist/ /app/frontend/static/static/pdfviewer/
|
||||
CMD npm run test
|
||||
|
||||
|
||||
FROM frontend-test AS frontend
|
||||
RUN npm run build
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
FROM node:16-alpine@sha256:710a2c192ca426e03e4f3ec1869e5c29db855eb6969b74e6c50fd270ffccd3f1 AS rendering-dev
|
||||
|
||||
WORKDIR /app/packages/markdown/
|
||||
COPY packages/markdown/package.json packages/markdown/package-lock.json /app/packages/markdown/
|
||||
RUN npm install
|
||||
|
||||
WORKDIR /app/rendering/
|
||||
COPY rendering/package.json rendering/package-lock.json /app/rendering/
|
||||
RUN npm install
|
||||
|
||||
|
||||
FROM rendering-dev AS rendering
|
||||
COPY rendering /app/rendering/
|
||||
COPY packages/markdown/ /app/packages/markdown/
|
||||
RUN npm run build
|
||||
|
||||
|
||||
|
||||
|
||||
FROM python:3.10-slim-bullseye@sha256:89648909125f37eeff6dee35491e6295c77b76c42aa1aff2523478990e73d3fe AS api-dev
|
||||
|
||||
# Install system dependencies required by weasyprint and chromium
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
chromium \
|
||||
curl \
|
||||
fontconfig \
|
||||
fonts-noto \
|
||||
fonts-noto-mono \
|
||||
fonts-noto-ui-core \
|
||||
fonts-noto-color-emoji \
|
||||
gpg \
|
||||
gpg-agent \
|
||||
libpango-1.0-0 \
|
||||
libpangoft2-1.0-0 \
|
||||
unzip \
|
||||
wget \
|
||||
postgresql-client \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install fonts
|
||||
WORKDIR /app/api/
|
||||
COPY api/download_fonts.sh api/generate_notice.sh api/NOTICE /app/api/
|
||||
RUN chmod +x /app/api/download_fonts.sh && /app/api/download_fonts.sh
|
||||
|
||||
# Install python packages
|
||||
ENV PYTHONUNBUFFERED=on \
|
||||
PYTHONDONTWRITEBYTECODE=on \
|
||||
CHROMIUM_EXECUTABLE=/usr/lib/chromium/chromium
|
||||
WORKDIR /app/api/
|
||||
COPY api/requirements.txt /app/api/requirements.txt
|
||||
RUN pip install -r /app/api/requirements.txt
|
||||
|
||||
# Configure application
|
||||
ARG VERSION=dev
|
||||
ENV VERSION=${VERSION} \
|
||||
DEBUG=off \
|
||||
MEDIA_ROOT=/data/ \
|
||||
SERVER_WORKERS=4 \
|
||||
PDF_RENDER_SCRIPT_PATH=/app/rendering/dist/bundle.js
|
||||
|
||||
# Copy license and changelog
|
||||
COPY LICENSE CHANGELOG.md /app/
|
||||
|
||||
# Start server
|
||||
EXPOSE 8000
|
||||
CMD python3 manage.py migrate && \
|
||||
gunicorn \
|
||||
--bind=:8000 --worker-class=uvicorn.workers.UvicornWorker --workers=${SERVER_WORKERS} \
|
||||
--max-requests=500 --max-requests-jitter=100 \
|
||||
reportcreator_api.conf.asgi:application
|
||||
|
||||
|
||||
|
||||
FROM api-dev as api-prebuilt
|
||||
|
||||
# Copy source code (including pre-build static files)
|
||||
COPY api/src /app/api
|
||||
COPY rendering/dist /app/rendering/dist/
|
||||
|
||||
# Create data directory
|
||||
RUN mkdir /data && chown 1000:1000 /data && chmod 777 /data
|
||||
VOLUME [ "/data" ]
|
||||
USER 1000
|
||||
|
||||
|
||||
|
||||
FROM api-dev AS api-test
|
||||
# Copy source code
|
||||
COPY api/src /app/api
|
||||
|
||||
# Copy generated template rendering script
|
||||
COPY --from=rendering /app/rendering/dist /app/rendering/dist/
|
||||
CMD pytest
|
||||
|
||||
|
||||
FROM api-test as api
|
||||
# Generate static frontend files
|
||||
# Post-process django files (for admin, API browser) and post-process them (e.g. add unique file hash)
|
||||
# Do not post-process nuxt files, because they already have hash names (and django fails to post-process them)
|
||||
USER root
|
||||
RUN python3 manage.py collectstatic --no-input --clear
|
||||
COPY --from=frontend /app/frontend/dist/ /app/api/frontend/
|
||||
RUN python3 manage.py collectstatic --no-input --no-post-process \
|
||||
&& python3 -m whitenoise.compress /app/api/static/
|
||||
USER 1000
|
|
@ -0,0 +1,117 @@
|
|||
# SysReptor Community License 1.0 (SysReptorL)
|
||||
## Acceptance
|
||||
|
||||
In order to get any Permissions to Use the Software under the
|
||||
SysReptorL, you must agree to it as both strict obligations
|
||||
and conditions to all your Licenses.
|
||||
|
||||
## Copyright License
|
||||
|
||||
The licensor grants you a non-exclusive copyright Permission
|
||||
to Use the Software for everything you might do with the Software
|
||||
that would otherwise infringe the licensor's copyright in it for
|
||||
any permitted purpose, other than distributing the software or
|
||||
making changes or new works based on the Software. Attempts to
|
||||
circumvent technical License restrictions are prohibited (e.g.
|
||||
to unlock or extend functionalities), even if they result from
|
||||
errors in the Software.
|
||||
|
||||
## Patent License
|
||||
|
||||
The licensor grants you a non-exclusive patent License for the
|
||||
Software that covers patent claims the licensor can license, or
|
||||
becomes able to license, that you would infringe by using the
|
||||
Software after its Intended Use.
|
||||
|
||||
## Internal Business Use
|
||||
|
||||
Use of the Software for the internal business operations of
|
||||
you and your Company is use for a permitted purpose.
|
||||
|
||||
## Personal Uses
|
||||
|
||||
Personal use for research, experiment, and testing for the
|
||||
benefit of public knowledge, personal study, private entertainment,
|
||||
hobby projects, amateur pursuits, or religious observance,
|
||||
without any anticipated commercial application, is use for a
|
||||
permitted purpose.
|
||||
|
||||
## Fair Use
|
||||
|
||||
You may have "**Fair Use**" rights for the Software under the law.
|
||||
The SysReptorL does not limit them unless otherwise agreed.
|
||||
|
||||
Pursuant to Section 40d of the Act on Copyright and Related
|
||||
Rights (Urheberrechtsgesetz, UrhG), computer programs may be
|
||||
edited and reproduced within the framework of the Fair Use of
|
||||
works to the extent that this is necessary for the Intended
|
||||
Use of the Software by the person entitled to use it. The
|
||||
**Intended Use** is limited to the permitted purpose of the Software
|
||||
in accordance with the SysReptorL.
|
||||
|
||||
## No Other Rights
|
||||
|
||||
The SysReptorL does not allow you to sublicense or transfer
|
||||
any of your Licenses to anyone else or prevent the licensor
|
||||
from granting Licenses to anyone else. The SysReptorL does not
|
||||
imply any other Licenses than those mentioned therein.
|
||||
|
||||
## Patent Defense
|
||||
|
||||
If you make any written claim that the Software infringes or
|
||||
contributes to infringement of any patent, your patent License
|
||||
for the Software granted under this SysReptorL ends immediately. If
|
||||
your Company makes such a claim, your patent License ends
|
||||
immediately for work on behalf of your Company. Irrespective of the
|
||||
withdrawal of Permission to Use the Software, we reserve the right
|
||||
to assert claims for damages.
|
||||
|
||||
## Violations
|
||||
|
||||
The first time you are notified in writing that you have
|
||||
violated any of these terms, or done anything with the software
|
||||
not covered by your licenses, your licenses can nonetheless
|
||||
continue if you come into full compliance with these terms,
|
||||
and take practical steps to correct past violations, within
|
||||
32 days of receiving notice. Otherwise, all your licenses
|
||||
end immediately.
|
||||
|
||||
## No Liability
|
||||
|
||||
***As far as the law allows, the Software comes “as is”, without
|
||||
any warranty or condition, and the licensor will not be liable
|
||||
to you for any damages arising out of this SysReptorL or the use
|
||||
or nature of the Software, under any kind of legal claim.***
|
||||
|
||||
## Definitions
|
||||
|
||||
The SysReptor Community License 1.0 (**SysReptorL**) is granted by
|
||||
Syslifters GmbH, FN 578505v, registered office Göllersdorf
|
||||
(**Syslifters**; **we**; **licensor**) to **you**.
|
||||
|
||||
**License**: Is the overall term for the authorization to use the
|
||||
Software. The term "License" says nothing about the copyright
|
||||
classification.
|
||||
|
||||
**Software**: is the software the licensor makes available under
|
||||
these terms.
|
||||
|
||||
**Permission to Use the Software** (*Werknutzungsbewilligung*):
|
||||
Non-exclusive copyright Permission to Use the Software. **Use**
|
||||
means anything you do with the software requiring one of your
|
||||
licenses.
|
||||
|
||||
**Your Company**: Is any legal entity, sole proprietorship, or
|
||||
other kind of organization that you work for, plus all organizations
|
||||
that have control over, are under the control of, or are under common
|
||||
control with that organization. **Control** means ownership of
|
||||
substantially all the assets of an entity, or the power to direct its
|
||||
management and policies by vote, contract, or otherwise. Control can
|
||||
be direct or indirect.
|
||||
|
||||
**Your licenses** are all the licenses granted to you for the
|
||||
software under these terms.
|
||||
|
||||
|
||||
------------
|
||||
**Last Updated:** 24 March 2023
|
|
@ -0,0 +1,53 @@
|
|||
<a href="https://github.com/syslifters/sysreptor/">
|
||||
<img src="https://img.shields.io/github/stars/Syslifters/sysreptor?color=yellow&style=flat-square">
|
||||
</a>
|
||||
<a href="https://github.com/syslifters/sysreptor/releases/latest">
|
||||
<img src="https://img.shields.io/github/v/release/syslifters/sysreptor?color=green&style=flat-square">
|
||||
</a>
|
||||
<a href="https://github.com/syslifters/sysreptor/releases/latest">
|
||||
<img src="https://img.shields.io/github/release-date/syslifters/sysreptor?color=blue&style=flat-square">
|
||||
</a>
|
||||
<a href="https://github.com/syslifters/sysreptor/releases/latest">
|
||||
<img src="https://img.shields.io/github/repo-size/syslifters/sysreptor?color=red&style=flat-square">
|
||||
</a>
|
||||
<a href="https://www.linkedin.com/company/syslifters/">
|
||||
<img src="https://img.shields.io/badge/-Linkedin-blue?style=flat-square&logo=linkedin">
|
||||
</a>
|
||||
<a href="https://twitter.com/intent/user?screen_name=sysreptor">
|
||||
<img src="https://img.shields.io/twitter/follow/sysreptor?style=social">
|
||||
</a><br>
|
||||
|
||||
# SysReptor - Pentest Reporting Easy As Pie
|
||||
|
||||
📝 Write in markdown<br>
|
||||
✏️ Design in HTML/VueJS<br>
|
||||
⚙️ Render your report to PDF<br>
|
||||
🚀 Fully customizable<br>
|
||||
💻 Self-hosted or Cloud<br>
|
||||
🎉 No need for Word<br>
|
||||
|
||||
Happy Hacking! :)
|
||||
|
||||
|
||||
* Playground: [Try it out!](https://cloud.sysreptor.com/demo)
|
||||
* Docs: https://docs.sysreptor.com/
|
||||
* Features: https://docs.sysreptor.com/features-and-pricing/
|
||||
|
||||
|
||||
## Installation
|
||||
You will need:
|
||||
* Ubuntu
|
||||
* Latest Docker with Docker Compose v2
|
||||
|
||||
```bash
|
||||
curl -s https://docs.sysreptor.com/install.sh | bash
|
||||
```
|
||||
|
||||
Access your application at http://localhost:8000/.
|
||||
|
||||
|
||||
![Create finding from template](https://docs.sysreptor.com/images/create_finding_from_template.gif)
|
||||
|
||||
![Export report as PDF](https://docs.sysreptor.com/images/export_project.gif)
|
||||
|
||||
|
|
@ -0,0 +1,147 @@
|
|||
# Django #
|
||||
*.log
|
||||
*.pot
|
||||
*.pyc
|
||||
__pycache__
|
||||
db.sqlite3
|
||||
media
|
||||
|
||||
# Backup files #
|
||||
*.bak
|
||||
|
||||
# If you are using PyCharm #
|
||||
# User-specific stuff
|
||||
.idea/**/workspace.xml
|
||||
.idea/**/tasks.xml
|
||||
.idea/**/usage.statistics.xml
|
||||
.idea/**/dictionaries
|
||||
.idea/**/shelf
|
||||
|
||||
# AWS User-specific
|
||||
.idea/**/aws.xml
|
||||
|
||||
# Generated files
|
||||
.idea/**/contentModel.xml
|
||||
|
||||
# Sensitive or high-churn files
|
||||
.idea/**/dataSources/
|
||||
.idea/**/dataSources.ids
|
||||
.idea/**/dataSources.local.xml
|
||||
.idea/**/sqlDataSources.xml
|
||||
.idea/**/dynamic.xml
|
||||
.idea/**/uiDesigner.xml
|
||||
.idea/**/dbnavigator.xml
|
||||
|
||||
# Gradle
|
||||
.idea/**/gradle.xml
|
||||
.idea/**/libraries
|
||||
|
||||
# File-based project format
|
||||
*.iws
|
||||
|
||||
# IntelliJ
|
||||
out/
|
||||
|
||||
# JIRA plugin
|
||||
atlassian-ide-plugin.xml
|
||||
|
||||
# Python #
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# Distribution / packaging
|
||||
.Python build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
.pytest_cache/
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
**/junit.xml
|
||||
test-reports
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery
|
||||
celerybeat-schedule.*
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
# Sublime Text #
|
||||
*.tmlanguage.cache
|
||||
*.tmPreferences.cache
|
||||
*.stTheme.cache
|
||||
*.sublime-workspace
|
||||
*.sublime-project
|
||||
|
||||
# sftp configuration file
|
||||
sftp-config.json
|
||||
|
||||
# Package control specific files
|
||||
Package Control.last-run
|
||||
Package Control.ca-list
|
||||
Package Control.ca-bundle
|
||||
Package Control.system-ca-bundle
|
||||
GitHub.sublime-settings
|
||||
|
||||
# Visual Studio Code #
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
.history
|
||||
|
||||
data
|
||||
src/static
|
||||
src/frontend/*
|
||||
src/frontend/*/static/*
|
||||
!src/frontend/index.html
|
||||
!src/frontend/static
|
||||
!src/frontend/static/.gitkeep
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1 @@
|
|||
# ReportCreator API
|
|
@ -0,0 +1,32 @@
|
|||
#!/bin/bash
set -e

# Download google fonts and install them system-wide.
# Font family names are read line-by-line from the heredoc below.
while IFS= read -r fontname; do
    # "Open Sans" -> "Open+Sans" (for the download URL)
    FONTNAME_URL=$(echo "${fontname}" | tr " " "+")
    # "Open Sans" -> "open_sans" (for filesystem paths)
    FONTNAME_FS=$(echo "${fontname}" | tr "[:upper:]" "[:lower:]" | tr " " "_")
    # Quote the URL and paths so expanded values are not subject to
    # word-splitting or glob expansion.
    wget "https://fonts.google.com/download?family=${FONTNAME_URL}" -O "/tmp/${FONTNAME_FS}.zip" --quiet
    mkdir -p "/usr/share/fonts/truetype/${FONTNAME_FS}/"
    unzip -q "/tmp/${FONTNAME_FS}.zip" -d "/usr/share/fonts/truetype/${FONTNAME_FS}/"
    if [[ ${FONTNAME_FS} = 'roboto_serif' ]]; then
        # The Roboto Serif archive nests its font files under static/RobotoSerif/;
        # move only those into the final font directory.
        mv "/usr/share/fonts/truetype/${FONTNAME_FS}/" /tmp/roboto_serif_all/
        mv /tmp/roboto_serif_all/static/RobotoSerif/ "/usr/share/fonts/truetype/${FONTNAME_FS}/"
        rm -rf /tmp/roboto_serif_all/
    fi
    rm -f "/tmp/${FONTNAME_FS}.zip"
done << EOF
Open Sans
Roboto
Roboto Serif
Lato
Exo
Tinos
Source Code Pro
Roboto Mono
Courier Prime
EOF
# Fonts installed with package manager:
# Noto: Noto Sans, Noto Serif, Noto Mono

# Update font cache
fc-cache -f
|
|
@ -0,0 +1,69 @@
|
|||
#!/bin/bash
set -e
# Any subsequent(*) commands which fail will cause the shell script to exit immediately

# Whitelist of acceptable licenses for Python dependencies.
# `pip-licenses --allow-only` exits non-zero if any installed package uses a
# license outside this list, which aborts the script via `set -e`.
allow_only="MIT"
allow_only="$allow_only;MIT License"
allow_only="$allow_only;BSD License"
allow_only="$allow_only;Apache Software License"
allow_only="$allow_only;GNU General Public License v2 or later (GPLv2+)"
allow_only="$allow_only;GNU General Public License v2 (GPLv2)"
allow_only="$allow_only;GNU General Public License v3 (GPLv3)"
allow_only="$allow_only;GNU Library or Lesser General Public License (LGPL)"
allow_only="$allow_only;GNU Lesser General Public License v2 or later (LGPLv2+)"
allow_only="$allow_only;Mozilla Public License 1.0 (MPL)"
allow_only="$allow_only;Mozilla Public License 1.1 (MPL 1.1)"
allow_only="$allow_only;Mozilla Public License 2.0 (MPL 2.0)"
allow_only="$allow_only;Historical Permission Notice and Disclaimer (HPND)"
allow_only="$allow_only;Python Software Foundation License"

# Packages excluded from the generated NOTICE output (webencodings is added
# manually below because its distribution lacks a valid license file).
ignore="jsonschema"
ignore="$ignore;webencodings"


pip3 install pip-licenses
pip-licenses --allow-only "$allow_only" >/dev/null
pip-licenses -l --no-license-path -f plain-vertical --ignore-packages "$ignore" > NOTICE


# Those packages do not include valid license files
webencodings_license='''Copyright (c) 2012 by Simon Sapin.

Some rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
  notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the following
  disclaimer in the documentation and/or other materials provided
  with the distribution.

* The names of the contributors may not be used to endorse or
  promote products derived from this software without specific
  prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.'''


echo "" >> NOTICE
echo "webencodings" >> NOTICE
# Use $(...) instead of backticks and anchor the pattern so only the
# webencodings entry itself matches (not any line merely containing the word).
version=$(pip freeze | grep '^webencodings==' | cut -d"=" -f 3)
echo "$version" >> NOTICE
echo "BSD License" >> NOTICE
echo "$webencodings_license" >> NOTICE
|
||||
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,61 @@
|
|||
[tool.poetry]
|
||||
name = "reportcreator-api"
|
||||
version = "0.1.0"
|
||||
description = "Pentest report creator"
|
||||
authors = []
|
||||
packages = [{include = "reportcreator_api"}]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "~3.10"
|
||||
django = "4.2"
|
||||
djangorestframework = "3.14.0"
|
||||
adrf = "0.1.0"
|
||||
# check weasyprint performance before updating: https://kozea.github.io/WeasyPerf/
|
||||
# do not update to 58.* until text spacing and footnote-call issues are fixed
|
||||
weasyprint = "57.2"
|
||||
|
||||
django-phonenumber-field = { extras = ["phonenumberslite"], version = "^7.0.0" }
|
||||
django-csp = "^3.7"
|
||||
django-storages = "^1.13.2"
|
||||
drf-nested-routers = "^0.93.4"
|
||||
django-filter = "^23.1"
|
||||
|
||||
psycopg = { extras = ["binary"], version = "^3.1.8" }
|
||||
gunicorn = "^20.1.0"
|
||||
uvicorn = "^0.21.1"
|
||||
whitenoise = "^6.4.0"
|
||||
brotli = "^1.0.9"
|
||||
requests = "^2.28.2"
|
||||
httpx = "^0.23.3"
|
||||
|
||||
jsonschema = "^4.17.3"
|
||||
python-decouple = "^3.8"
|
||||
pycryptodomex = "^3.17"
|
||||
pyotp = "^2.8.0"
|
||||
qrcode = { extras = ["pil"], version = "^7.4.2" }
|
||||
fido2 = "^1.1.1"
|
||||
authlib = "^1.2.0"
|
||||
python-gnupg = "^0.5.0"
|
||||
|
||||
lorem-text = "^2.1"
|
||||
zipstream-new = "^1.1.8"
|
||||
boto3 = "^1.26.5"
|
||||
pillow-heif = "^0.10.1"
|
||||
playwright = "^1.32.1"
|
||||
pikepdf = "^7.1.2"
|
||||
celery = { extras = ["librabbitmq"], version = "^5.2.7" }
|
||||
|
||||
django-debug-toolbar = "^4.0.0"
|
||||
debugpy = "^1.6.7"
|
||||
watchdog = "^3.0.0"
|
||||
pytest-django = "^4.5.2"
|
||||
pytest-xdist = "^3.2.1"
|
||||
pytest-cov = "^4.0.0"
|
||||
elastic-apm = "^6.15.1"
|
||||
|
||||
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,4 @@
|
|||
[run]
|
||||
relative_files = True
|
||||
source = ./
|
||||
omit = reportcreator_api/tests/*
|
|
@ -0,0 +1,6 @@
|
|||
<!DOCTYPE html>
|
||||
<head></head>
|
||||
<body>
|
||||
<h1>Frontend not built</h1>
|
||||
<p>In development mode, the frontend is accessible via <a href="http://localhost:3000/">http://localhost:3000/</a></p>
|
||||
</body>
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python
|
||||
"""Django's command-line utility for administrative tasks."""
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main():
    """Run administrative tasks.

    Entry point of Django's command-line utility: sets the default settings
    module for this project and hands the CLI arguments to Django.
    """
    # Point Django at the project settings unless the caller already set it.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; `from exc` preserves the original cause.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
|
|
@ -0,0 +1,5 @@
|
|||
[pytest]
|
||||
DJANGO_SETTINGS_MODULE = reportcreator_api.conf.settings_test
|
||||
# warnings from libraries
|
||||
filterwarnings =
|
||||
ignore:remove loop argument:DeprecationWarning
|
|
@ -0,0 +1,3 @@
|
|||
# Import the Celery app when Django starts (standard Celery + Django
# integration pattern) so tasks can bind to it via this package.
from reportcreator_api.conf.celery import celery_app

# Public API of this package: only the Celery application object.
__all__ = ('celery_app',)
|
|
@ -0,0 +1,7 @@
|
|||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiUtilsConfig(AppConfig):
    """Django AppConfig for the ``reportcreator_api.api_utils`` app."""

    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'reportcreator_api.api_utils'
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
import logging
|
||||
import zipstream
|
||||
import boto3
|
||||
import io
|
||||
import json
|
||||
import itertools
|
||||
from pathlib import Path
|
||||
from django.apps import apps
|
||||
from django.core import serializers
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
|
||||
from reportcreator_api.archive import crypto
|
||||
from reportcreator_api.pentests.models import UploadedImage, UploadedAsset, UploadedProjectFile, UploadedUserNotebookImage, ArchivedProject
|
||||
|
||||
|
||||
def create_database_dump():
    """
    Yield a dump of all Django model instances as JSON lines (bytes).

    The output uses the same format as "manage.py dumpdata --format=jsonl":
    one serialized object per line, ASCII-escaped and newline-terminated.
    """
    excluded_labels = {
        'contenttypes.ContentType', 'sessions.Session', 'users.Session',
        'admin.LogEntry', 'auth.Permission', 'auth.Group', 'pentests.LockInfo',
    }
    try:
        app_configs = (c for c in apps.get_app_configs() if c.models_module is not None)
        for model in itertools.chain.from_iterable(c.get_models() for c in app_configs):
            label = model._meta.label
            if label in excluded_labels:
                continue
            # All models are serialized with natural keys except PentestUser,
            # which keeps its real primary key.
            use_natural = label != 'users.PentestUser'
            queryset = model._default_manager.order_by(model._meta.pk.name)
            for instance in queryset.iterator():
                serialized = serializers.serialize(
                    'python',
                    [instance],
                    use_natural_foreign_keys=use_natural,
                    use_natural_primary_keys=use_natural,
                )[0]
                yield json.dumps(serialized, cls=DjangoJSONEncoder, ensure_ascii=True).encode() + b'\n'
    except Exception as ex:
        logging.exception('Error creating database dump')
        raise ex
|
||||
|
||||
|
||||
def backup_files(z, model, path):
    """
    Stream every file referenced by ``model`` into the zip stream ``z``,
    placed under the directory ``path`` inside the archive.

    Missing or unreadable files are logged and skipped so one broken file
    does not abort the whole backup.
    """
    storage = model.file.field.storage
    archive_dir = Path(path)
    distinct_files = model.objects.values_list('file', flat=True).distinct()
    for f in distinct_files.iterator():
        try:
            z.write_iter(str(archive_dir / f), storage.open(f).chunks())
        except (FileNotFoundError, OSError) as ex:
            logging.warning(f'Could not backup file {f}: {ex}')
|
||||
|
||||
|
||||
def create_backup():
    """
    Build a zip stream containing a full database dump plus all uploaded
    and archived files, and return the zipstream.ZipFile for the caller
    to iterate.
    """
    logging.info('Backup requested')
    archive = zipstream.ZipFile(mode='w', compression=zipstream.ZIP_DEFLATED)
    archive.write_iter('backup.jsonl', create_database_dump())

    # (model, directory inside the archive) — order determines archive layout.
    file_sources = [
        (UploadedImage, 'uploadedimages'),
        (UploadedUserNotebookImage, 'uploadedimages'),
        (UploadedAsset, 'uploadedassets'),
        (UploadedProjectFile, 'uploadedfiles'),
        (ArchivedProject, 'archivedfiles'),
    ]
    for model, directory in file_sources:
        backup_files(archive, model, directory)

    return archive
|
||||
|
||||
|
||||
def encrypt_backup(z, aes_key):
    """
    Encrypt the byte chunks of zip stream ``z`` with ``aes_key`` and yield
    the encrypted output as a stream of byte chunks.
    """
    buf = io.BytesIO()
    with crypto.open(fileobj=buf, mode='wb', key_id=None, key=crypto.EncryptionKey(id=None, key=aes_key)) as c:
        for chunk in z:
            c.write(chunk)
            # Hand out whatever encrypted data has been produced so far, then
            # reset the buffer so memory usage stays bounded while streaming.
            yield buf.getvalue()
            buf.seek(0)
            buf.truncate()
    # Closing the crypto stream may write trailing data into buf
    # (NOTE(review): presumably a footer/authentication tag — confirm against
    # reportcreator_api.archive.crypto). Emit it if present.
    if remaining := buf.getvalue():
        yield remaining
|
||||
|
||||
|
||||
def upload_to_s3_bucket(z, s3_params):
    """
    Upload the iterable of byte chunks ``z`` to an S3 object.

    ``s3_params`` must contain 'bucket_name' and 'key'; an optional
    'boto3_params' dict is passed through to boto3.resource().
    """
    class ChunkReader:
        """Adapt an iterable of bytes chunks to the file-like read() API
        expected by boto3's upload_fileobj."""

        def __init__(self, chunks):
            self._chunks = iter(chunks)
            self._pending = b''

        def read(self, size=8192):
            # Pull chunks until `size` bytes are available or the source ends;
            # returns b'' once fully exhausted (EOF convention).
            while len(self._pending) < size:
                try:
                    self._pending += next(self._chunks)
                except StopIteration:
                    break
            result, self._pending = self._pending[:size], self._pending[size:]
            return result

    s3 = boto3.resource('s3', **s3_params.get('boto3_params', {}))
    bucket = s3.Bucket(s3_params['bucket_name'])
    bucket.upload_fileobj(ChunkReader(z), s3_params['key'])
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
import functools
|
||||
import operator
|
||||
import uuid
|
||||
from django.utils.module_loading import import_string
|
||||
from django.core.cache import cache
|
||||
from django.db import connection
|
||||
from django.db.migrations.executor import MigrationExecutor
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
||||
def run_healthchecks(checks: dict[str, str]):
    """
    Execute the configured healthcheck functions and report their results.

    *checks* maps a service name to the dotted import path of a zero-argument
    check function returning a truthy value on success. Responds with HTTP 503
    when any check fails, else HTTP 200.
    """
    results = {
        service: import_string(check_path)()
        for service, check_path in checks.items()
    }
    has_errors = not all(results.values())
    return Response(data=results, status=503 if has_errors else 200)
|
||||
|
||||
|
||||
def check_database():
    """
    Check if the application can perform a dummy sql query
    """
    with connection.cursor() as cursor:
        cursor.execute("SELECT 1; -- Healthcheck")
        first_row = cursor.fetchone()
        # Truthy only when the query actually round-tripped the expected value
        return first_row and first_row[0] == 1
|
||||
|
||||
|
||||
def check_cache():
    """
    Check that the default cache backend is reachable by writing, reading back
    and deleting a short-lived random value.
    """
    token = str(uuid.uuid4())
    cache_key = "healthcheck:%s" % token
    cache.set(cache_key, token, timeout=5)
    roundtripped = cache.get(cache_key)
    cache.delete(cache_key)
    return roundtripped == token
|
||||
|
||||
|
||||
def check_migrations():
    """
    Check if django has unapplied migrations.
    """
    cache_key = __name__ + '.migration_check_cache'
    cached = cache.get(cache_key)
    if cached:
        return cached

    executor = MigrationExecutor(connection)
    pending_plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
    all_applied = not pending_plan
    if all_applied:
        # Cache only the positive outcome: once all migrations are applied this
        # cannot regress until the next deployment, so skip the DB inspection.
        cache.set(key=cache_key, value=all_applied, timeout=10 * 60)
    return all_applied
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
from django.db import migrations, models
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """
    Create database models used by LanguageTool.
    LanguageTool does not come with a way to run migrations, therefore we manage them with the Django ORM.
    Reuse Django user accounts for LanguageTool users to authenticate (via a DB view).
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # LanguageTool expects a "users" table with id/email/api_key columns.
        # Map Django users onto it via a DB view: the UUID primary key is folded
        # into a 63-bit integer id, and the UUID string doubles as both email and
        # api_key. A static "languagetool" account (id=1) is appended as a
        # shared/global fallback user.
        migrations.RunSQL(
            sql="CREATE VIEW users AS " +
                "SELECT ('x' || translate(u.id::text, '-', ''))::bit(63)::bigint AS id, u.id::text AS email, u.id::text AS api_key FROM users_pentestuser u " +
                "UNION SELECT 1 AS id, 'languagetool' AS email, 'languagetool' AS api_key;",
            reverse_sql="DROP VIEW users;"
        ),
        # Per-user spellcheck dictionary entries; table name matches what
        # LanguageTool reads/writes directly ("ignore_words").
        migrations.CreateModel(
            name='LanguageToolIgnoreWords',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('user_id', models.BigIntegerField(db_index=True)),
                ('ignore_word', models.CharField(max_length=255)),
                ('created_at', models.DateTimeField(auto_created=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'ignore_words',
            }
        )
    ]
|
|
@ -0,0 +1,13 @@
|
|||
from django.db import models
|
||||
|
||||
|
||||
class LanguageToolIgnoreWords(models.Model):
    """
    Entry of a user's custom spellcheck dictionary.

    Mirrors the "ignore_words" table that LanguageTool reads/writes directly
    (created by the corresponding migration); Django only mirrors its schema.
    """
    id = models.BigAutoField(primary_key=True)
    created_at = models.DateTimeField(auto_created=True)
    updated_at = models.DateTimeField(auto_now=True)
    # Plain integer, not a Django FK: references users.id of the LanguageTool
    # user view (derived from the Django user's UUID)
    user_id = models.BigIntegerField(db_index=True)
    ignore_word = models.CharField(max_length=255)

    class Meta:
        db_table = 'ignore_words'
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
from rest_framework.permissions import IsAuthenticated
|
||||
|
||||
class IsSystemUser(IsAuthenticated):
    """Grant access only to authenticated users flagged as system users."""

    def has_permission(self, request, view):
        if not super().has_permission(request, view):
            return False
        return request.user.is_system_user
|
||||
|
||||
|
||||
class IsUserManagerOrSuperuser(IsAuthenticated):
    """Grant access only to authenticated user managers or superusers."""

    def has_permission(self, request, view):
        if not super().has_permission(request, view):
            return False
        user = request.user
        return user.is_user_manager or user.is_superuser
|
||||
|
|
@ -0,0 +1,114 @@
|
|||
import json
|
||||
import logging
|
||||
import httpx
|
||||
from base64 import b64decode
|
||||
from urllib.parse import urljoin
|
||||
from django.conf import settings
|
||||
from rest_framework import serializers, exceptions
|
||||
|
||||
from reportcreator_api.pentests.models import Language
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TextAnnotationField(serializers.Serializer):
    """One annotation item of LanguageTool's structured "data" check format."""
    text = serializers.CharField(required=False, allow_blank=True, trim_whitespace=False)
    markup = serializers.CharField(required=False, allow_blank=True, trim_whitespace=False)
    interpretAs = serializers.CharField(required=False, allow_blank=True, trim_whitespace=False)
    offset = serializers.IntegerField(min_value=0, required=False)

    def validate(self, attrs):
        # At least one of the two content fields must be present
        has_content = attrs.get('text') is not None or attrs.get('markup') is not None
        if not has_content:
            raise serializers.ValidationError('Either text or markup is required')
        return attrs
|
||||
|
||||
|
||||
class TextDataField(serializers.Serializer):
    """Container for the annotation list forwarded as LanguageTool's "data" parameter."""
    annotation = TextAnnotationField(many=True)
|
||||
|
||||
|
||||
class LanguageToolSerializerBase(serializers.Serializer):
    """Shared helpers for serializers that call the LanguageTool HTTP API."""

    def languagetool_auth(self):
        """Credentials for LanguageTool; per-user dictionaries use the user id as both username and API key."""
        if settings.SPELLCHECK_DICTIONARY_PER_USER:
            user_id = str(self.context['request'].user.id)
            return {
                'username': user_id,
                'apiKey': user_id,
            }
        return {
            'username': 'languagetool',
            'apiKey': 'languagetool',
        }

    async def languagetool_request(self, path, data):
        """POST *data* (plus auth fields) to the spellcheck service and return the parsed JSON response."""
        if not settings.SPELLCHECK_URL:
            raise exceptions.PermissionDenied('Spell checker not configured')

        async with httpx.AsyncClient(timeout=10) as client:
            res = await client.post(
                url=urljoin(settings.SPELLCHECK_URL, path),
                data=self.languagetool_auth() | data
            )
            return res.json()
|
||||
|
||||
|
||||
class LanguageToolSerializer(LanguageToolSerializerBase):
    """Request serializer for LanguageTool's /v2/check endpoint."""
    language = serializers.ChoiceField(choices=Language.choices + [('auto', 'auto')])
    data = TextDataField()

    async def spellcheck(self):
        """Run the spellcheck request and return LanguageTool's response as a dict."""
        data = self.validated_data
        payload = {
            'language': data['language'],
            'data': json.dumps(data['data'], ensure_ascii=False),
        }
        if data['language'] == 'auto':
            # Restrict auto-detection to the languages supported for reports
            payload['preferredVariants'] = Language.values
        return await self.languagetool_request('/v2/check', payload)
|
||||
|
||||
|
||||
def validate_singe_word(val):
    """Serializer validator: dictionary entries must be a single word (no spaces)."""
    # NOTE: name keeps the existing (misspelled) public identifier; it is
    # referenced by other serializers in this module.
    if val.count(' ') > 0:
        raise serializers.ValidationError('Only a single word is supported')
|
||||
|
||||
|
||||
class LanguageToolAddWordSerializer(LanguageToolSerializerBase):
    """Request serializer for adding a word to the user's spellcheck dictionary."""
    word = serializers.CharField(max_length=255, validators=[validate_singe_word])

    async def save(self):
        payload = {'word': self.validated_data['word']}
        return await self.languagetool_request('/v2/words/add', payload)
|
||||
|
||||
|
||||
class S3ParamsSerializer(serializers.Serializer):
    """Target S3 location for backup uploads."""
    bucket_name = serializers.CharField()
    key = serializers.CharField()
    # Optional kwargs passed straight through to boto3.resource('s3', **boto3_params)
    boto3_params = serializers.JSONField(required=False)
|
||||
|
||||
|
||||
class BackupSerializer(serializers.Serializer):
    """
    Validates requests to the backup endpoint.

    *key* must match the configured BACKUP_KEY; *aes_key* is an optional
    base64-encoded 256-bit AES key used to encrypt the archive; *s3_params*
    optionally redirects the backup to an S3 bucket instead of the response.
    """
    key = serializers.CharField()
    aes_key = serializers.CharField(required=False, allow_null=True)
    s3_params = S3ParamsSerializer(required=False, allow_null=True)

    def validate_key(self, key):
        """Check the submitted backup key against settings.BACKUP_KEY."""
        # Local import: only needed for the constant-time comparison below.
        import secrets

        if not settings.BACKUP_KEY or len(settings.BACKUP_KEY) < 20:
            log.error('Backup key not set or too short (min 20 chars)')
            raise serializers.ValidationError()
        # The submitted key is untrusted input compared against a secret:
        # use a constant-time comparison instead of != to avoid a timing side channel.
        if not secrets.compare_digest(key.encode(), settings.BACKUP_KEY.encode()):
            log.error('Invalid backup key')
            raise serializers.ValidationError()
        return key

    def validate_aes_key(self, value):
        """Validate that the optional AES key is valid base64 and decodes to 256 bit."""
        if not value:
            return None

        # Keep the try body minimal: only b64decode can raise ValueError here;
        # previously the length-check's ValidationError lived inside the try too.
        try:
            key_bytes = b64decode(value)
        except ValueError as ex:
            raise serializers.ValidationError('Invalid base64 encoding') from ex
        if len(key_bytes) != 32:
            raise serializers.ValidationError('Invalid key length: must be a 256-bit AES key')
        return value
|
||||
|
||||
|
|
@ -0,0 +1,130 @@
|
|||
import logging
|
||||
from asgiref.sync import sync_to_async
|
||||
from base64 import b64decode
|
||||
from django.http import StreamingHttpResponse
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from rest_framework import viewsets, routers
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.settings import api_settings
|
||||
|
||||
from reportcreator_api.api_utils.serializers import LanguageToolAddWordSerializer, LanguageToolSerializer, BackupSerializer
|
||||
from reportcreator_api.api_utils.healthchecks import run_healthchecks
|
||||
from reportcreator_api.api_utils.permissions import IsSystemUser, IsUserManagerOrSuperuser
|
||||
from reportcreator_api.api_utils import backup_utils
|
||||
from reportcreator_api.users.models import PentestUser
|
||||
from reportcreator_api.utils.api import GenericAPIViewAsync
|
||||
from reportcreator_api.utils import license
|
||||
from reportcreator_api.pentests.models import Language
|
||||
from reportcreator_api.pentests.models import ProjectMemberRole
|
||||
from reportcreator_api.tasks.models import PeriodicTask
|
||||
from reportcreator_api.utils.utils import copy_keys
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UtilsViewSet(viewsets.ViewSet):
    """Miscellaneous endpoints: public instance settings, license info, backups, API root listing."""

    def get_serializer_class(self):
        # Per-action serializer selection; actions without input use the empty Serializer
        if self.action == 'backup':
            return BackupSerializer
        elif self.action == 'spellcheck':
            return LanguageToolSerializer
        else:
            return Serializer

    def get_serializer(self, *args, **kwargs):
        return self.get_serializer_class()(*args, **kwargs)

    def list(self, *args, **kwargs):
        # Expose the sub-endpoints in the browsable API root
        return routers.APIRootView(api_root_dict={
            'settings': 'utils-settings',
            'license': 'utils-license',
            'spellcheck': 'utils-spellcheck',
            'backup': 'utils-backup',
            'healthcheck': 'utils-healthcheck',
        }).get(*args, **kwargs)

    @action(detail=False, url_name='settings', url_path='settings', authentication_classes=[], permission_classes=[])
    def settings_endpoint(self, *args, **kwargs):
        """Public (unauthenticated) instance configuration consumed by the frontend."""
        return Response({
            'languages': [{'code': code, 'name': name} for code, name in Language.choices],
            'project_member_roles': [{'role': r.role, 'default': r.default} for r in ProjectMemberRole.predefined_roles],
            'auth_providers': [{'id': k, 'name': v.get('label', k)} for k, v in settings.AUTHLIB_OAUTH_CLIENTS.items()] if license.is_professional() else [],
            'elastic_apm_rum_config': settings.ELASTIC_APM_RUM_CONFIG if settings.ELASTIC_APM_RUM_ENABLED else None,
            'archiving_threshold': settings.ARCHIVING_THRESHOLD,
            'license': copy_keys(license.check_license(), ['type', 'error']),
            'features': {
                'private_designs': settings.ENABLE_PRIVATE_DESIGNS,
                'spellcheck': bool(settings.SPELLCHECK_URL and license.is_professional()),
                'archiving': license.is_professional(),
            },
        })

    @action(detail=False, methods=['post'], permission_classes=api_settings.DEFAULT_PERMISSION_CLASSES + [IsSystemUser, license.ProfessionalLicenseRequired])
    def backup(self, request, *args, **kwargs):
        """
        Create a full backup (database dump plus uploaded files) as a zip stream.

        Optionally encrypts the archive with a client-supplied AES key, then
        either uploads it to S3 or streams it back as the HTTP response.
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.data
        aes_key = data.get('aes_key')
        if aes_key:
            aes_key = b64decode(aes_key)

        z = backup_utils.create_backup()
        if aes_key:
            z = backup_utils.encrypt_backup(z, aes_key=aes_key)

        if s3_params := data.get('s3_params'):
            backup_utils.upload_to_s3_bucket(z, s3_params)
            return Response(status=200)
        else:
            response = StreamingHttpResponse(z)
            filename = f'backup-{timezone.now().isoformat()}.zip'
            if aes_key:
                filename += '.crypt'
            else:
                response['Content-Type'] = 'application/zip'

            # BUG FIX: the computed filename (incl. the .crypt suffix for
            # encrypted archives) was never interpolated into the header.
            response['Content-Disposition'] = f"attachment; filename={filename}"
            log.info('Sending Backup')
            return response

    @action(detail=False, methods=['get'], permission_classes=api_settings.DEFAULT_PERMISSION_CLASSES + [IsUserManagerOrSuperuser])
    def license(self, request, *args, **kwargs):
        """License details plus the current licensed-user count (user managers/superusers only)."""
        return Response(data=license.check_license() | {
            'active_users': PentestUser.objects.get_licensed_user_count(),
        })
|
||||
|
||||
|
||||
class SpellcheckView(GenericAPIViewAsync):
    """Async endpoint proxying spellcheck requests to LanguageTool."""
    serializer_class = LanguageToolSerializer
    permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [license.ProfessionalLicenseRequired]

    async def post(self, request, *args, **kwargs):
        serializer = await self.aget_valid_serializer(data=request.data)
        return Response(data=await serializer.spellcheck())
|
||||
|
||||
|
||||
class SpellcheckWordView(GenericAPIViewAsync):
    """Async endpoint adding a word to the user's spellcheck dictionary."""
    serializer_class = LanguageToolAddWordSerializer
    permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [license.ProfessionalLicenseRequired]

    async def post(self, request, *args, **kwargs):
        serializer = await self.aget_valid_serializer(data=request.data)
        return Response(data=await serializer.save())
|
||||
|
||||
|
||||
class HealthcheckView(GenericAPIViewAsync):
    """Unauthenticated healthcheck endpoint; also piggybacks periodic task execution."""
    authentication_classes = []
    permission_classes = []

    async def get(self, *args, **kwargs):
        # Trigger periodic tasks
        await PeriodicTask.objects.run_all_pending_tasks()

        result = await sync_to_async(run_healthchecks)(settings.HEALTH_CHECKS)
        return result
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
from .base import *
|
||||
from .fields import *
|
||||
from .storage import *
|
||||
from .secret_sharing import *
|
|
@ -0,0 +1,362 @@
|
|||
import base64
|
||||
import dataclasses
|
||||
import enum
|
||||
import io
|
||||
import json
|
||||
from typing import Optional
|
||||
from Cryptodome.Cipher import AES
|
||||
from Cryptodome.Cipher._mode_gcm import _GHASH, _ghash_clmul, _ghash_portable
|
||||
from Cryptodome.Util.number import long_to_bytes, bytes_to_long
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files.utils import FileProxyMixin
|
||||
|
||||
|
||||
# Magic bytes to identify encrypted data
|
||||
# Invalid UTF-8, such that an error occurs when someone tries to load encrypted data as text
|
||||
MAGIC = b'\xC2YPT'
|
||||
|
||||
|
||||
class CryptoError(Exception):
    """Base error for all encryption/decryption failures in this module."""
    pass
|
||||
|
||||
|
||||
class EncryptionCipher(enum.Enum):
    # Supported symmetric ciphers; currently only authenticated AES in GCM mode.
    # The value is stored in the file header's metadata ("cipher" field).
    AES_GCM = 'AES-GCM'
|
||||
|
||||
|
||||
@dataclasses.dataclass
class EncryptionKey:
    """A symmetric encryption key plus its metadata, as configured in settings."""
    id: str
    key: bytes
    cipher: EncryptionCipher = EncryptionCipher.AES_GCM
    revoked: bool = False

    @classmethod
    def from_json_list(cls, data: str) -> dict:
        """Parse a JSON list of key definitions into a dict keyed by key id."""
        if not data:
            return {}
        return {
            entry['id']: cls(**(entry | {
                'cipher': EncryptionCipher(entry['cipher']),
                'key': base64.b64decode(entry['key']),
            }))
            for entry in json.loads(data)
        }
|
||||
|
||||
|
||||
class ReadIntoAdapter(FileProxyMixin):
    """Adds a readinto() implementation to file-like objects that only provide read()."""

    def __init__(self, file) -> None:
        self.file = file

    def readinto(self, b):
        # Emulate readinto(): read at most len(b) bytes and copy them into b
        data = self.file.read(len(b))
        n = len(data)
        b[0:n] = data
        return n
|
||||
|
||||
|
||||
def open(fileobj, mode='r', **kwargs):
    """
    Open *fileobj* for transparent decryption ('r'/'rb') or encryption ('w'/'wb').

    Reading: sniffs the MAGIC header; encrypted data yields a DecryptionStream,
    plaintext data yields a non-closing pass-through reader (or raises
    CryptoError when plaintext fallback is disabled).
    Writing: uses the key given via key/key_id (default from settings); without
    a key, falls back to a plain pass-through writer or raises CryptoError.
    """
    # BUG FIX: consume the kwarg with pop() instead of get(). With get(), a
    # caller-supplied plaintext_fallback stayed in **kwargs and was forwarded to
    # the stream constructors, which do not accept it (TypeError).
    plaintext_fallback = kwargs.pop('plaintext_fallback', settings.ENCRYPTION_PLAINTEXT_FALLBACK)

    if mode in ['r', 'rb']:
        key = kwargs.pop('key', None)
        keys = kwargs.pop('keys', settings.ENCRYPTION_KEYS)

        # Ensure the stream supports readinto() and peek() for header sniffing
        if not hasattr(fileobj, 'readinto'):
            fileobj = ReadIntoAdapter(fileobj)
        if not hasattr(fileobj, 'peek'):
            fileobj = io.BufferedReader(fileobj)
        if fileobj.peek(len(MAGIC)).startswith(MAGIC):
            return DecryptionStream(fileobj=fileobj, key=key, keys=keys, **kwargs)
        elif plaintext_fallback:
            return BufferedReaderNonClosing(fileobj)
        else:
            raise CryptoError('Data is not encrypted and plaintext fallback is disabled')
    elif mode in ['w', 'wb']:
        key_id = kwargs.pop('key_id', settings.DEFAULT_ENCRYPTION_KEY_ID)
        key = kwargs.pop('key', settings.ENCRYPTION_KEYS.get(key_id))

        if key:
            return EncryptionStream(fileobj, key=key, **kwargs)
        elif plaintext_fallback:
            return BufferedWriterNonClosing(fileobj)
        else:
            raise CryptoError('No key provided and plaintext fallback is disabled')
|
||||
|
||||
|
||||
def readexact(fileobj, size):
    """Read exactly *size* bytes from *fileobj*; raise CryptoError on a short read."""
    chunks = []
    remaining = size
    while remaining > 0:
        chunk = fileobj.read(remaining)
        if not chunk:
            raise CryptoError('Data missing on stream. Cannot read desired amount of data.')
        chunks.append(chunk)
        remaining -= len(chunk)
    return b''.join(chunks)
|
||||
|
||||
|
||||
def readall(fileobj):
    """Read *fileobj* to exhaustion and return the content as a single bytes object."""
    parts = []
    while chunk := fileobj.read():
        parts.append(chunk)
    return b''.join(parts)
|
||||
|
||||
|
||||
class NonClosingBufferedIOMixin:
    """
    BufferedReader that does not close the underlying raw stream when the reader gets closed.
    """
    def close(self):
        # Override the buffered close(): flush pending data but deliberately skip
        # closing self.raw, so the wrapped stream stays usable by the caller.
        if self.raw is not None and not self.closed:
            # may raise BlockingIOError or BrokenPipeError etc
            self.flush()
|
||||
|
||||
class BufferedReaderNonClosing(NonClosingBufferedIOMixin, io.BufferedReader):
    """Buffered reader that leaves the wrapped raw stream open on close()."""
    pass
|
||||
|
||||
|
||||
class BufferedWriterNonClosing(NonClosingBufferedIOMixin, io.BufferedWriter):
    """Buffered writer that leaves the wrapped raw stream open on close()."""
    pass
|
||||
|
||||
|
||||
class EncryptionStream(io.RawIOBase):
    """
    Write-only stream that encrypts everything written to it.

    Output layout on the wrapped file object: MAGIC + JSON metadata header +
    b'\\x00' separator, followed by the ciphertext, followed by the GCM
    authentication tag (written on close()). The header is included in the
    authenticated data so tampering with it is detected on decryption.
    """

    def __init__(self, fileobj, key: EncryptionKey, nonce=None) -> None:
        self.fileobj = fileobj
        # Header is written lazily by the first write() (or by close())
        self.header_written = False
        self.key = key
        self.cipher = self._init_cipher(nonce=nonce)

    def readable(self) -> bool:
        return False

    def writable(self) -> bool:
        return True

    def seekable(self) -> bool:
        return False

    def _init_cipher(self, nonce=None):
        # Revoked keys may still decrypt old data but must not produce new ciphertext
        if self.key.revoked:
            raise CryptoError('Key is revoked. It cannot be used for encryption anymore.')
        if self.key.cipher == EncryptionCipher.AES_GCM:
            return AES.new(key=self.key.key, mode=AES.MODE_GCM, nonce=nonce)
        else:
            raise CryptoError('Unknown cipher')

    def _ensure_header(self):
        # Idempotent: the header is emitted exactly once, before any ciphertext
        if self.header_written:
            return

        # Write header at start of file before any data
        header = MAGIC + json.dumps({
            'cipher': self.key.cipher.value,
            'nonce': base64.b64encode(self.cipher.nonce).decode(),
            'key_id': self.key.id,
        }).encode() + b'\x00'
        self.fileobj.write(header)

        # Add header to authentication data. Modifications in header will be detected by authentication tag.
        self.cipher.update(header)
        self.header_written = True

    def write(self, data: bytes):
        if self.closed:
            raise ValueError('write() on closed stream')

        # Encrypt data
        self._ensure_header()
        self.fileobj.write(self.cipher.encrypt(data))

    def flush(self) -> None:
        return self.fileobj.flush()

    def close(self):
        if self.closed:
            return
        try:
            # Write authentication tag at end; _ensure_header() covers the case
            # where the stream is closed without any write() call.
            self._ensure_header()
            tag = self.cipher.digest()
            self.fileobj.write(tag)
        finally:
            super().close()
|
||||
|
||||
|
||||
class DecryptionStream(io.RawIOBase):
    """
    Read-only stream that transparently decrypts data written by EncryptionStream.

    The header (MAGIC + JSON metadata + b'\\x00') is parsed eagerly in __init__.
    While reading, the last auth_tag_len bytes seen are withheld in a sliding
    buffer, because they may be the trailing GCM auth tag; the tag is verified
    on close() (and before any seek()).
    """

    def __init__(self, fileobj, key: Optional[EncryptionKey] = None, keys: Optional[dict[str, EncryptionKey]] = None) -> None:
        self.fileobj = fileobj
        # BUG FIX: attribute name was misspelled "metdata"; the real
        # self.metadata is populated by _load_header() below.
        self.metadata = None
        self.cipher = None
        self.header_len = 0
        self.auth_tag_len = 16
        # Sliding buffer of the most recent bytes read; once the stream is
        # exhausted it contains exactly the auth tag.
        self.auth_tag_buffer = bytearray()
        self.auth_tag_verified = False

        self._load_header(key=key, keys=keys)

    def readable(self) -> bool:
        return True

    def writable(self) -> bool:
        return False

    def seekable(self) -> bool:
        # Seeking is only implemented for AES-GCM (via a CTR-mode re-init)
        return self.fileobj.seekable() and self.metadata['cipher'] == EncryptionCipher.AES_GCM

    def _load_header(self, key=None, keys=None):
        """Parse and authenticate-register the header; select the decryption key and init the cipher."""
        # Check magic
        if self.fileobj.read(len(MAGIC)) != MAGIC:
            raise CryptoError('Invalid header: magic not found')

        # Read metadata up to the b'\x00' terminator
        metadata_buffer = bytearray()
        while True:
            b = self.fileobj.read(1)
            if not b:
                raise CryptoError('Invalid header: missing or corrupted metadata')
            elif b == b'\x00':
                break
            else:
                metadata_buffer.extend(b)

        # Decode metadata
        try:
            self.metadata = json.loads(metadata_buffer)
            self.metadata['cipher'] = EncryptionCipher(self.metadata['cipher'])
            self.metadata['nonce'] = base64.b64decode(self.metadata['nonce'])

            # An explicit key wins; otherwise look the key up by the header's key_id
            if key:
                self.metadata['key'] = key
            elif keys:
                self.metadata['key'] = keys.get(self.metadata['key_id'])
            else:
                raise CryptoError('Either a key or a multiple available keys must be given')
        except CryptoError:
            raise
        except Exception as ex:
            raise CryptoError('Failed to load metadata') from ex

        # Check metadata
        if not self.metadata['key']:
            raise CryptoError('Metadata contains unknown key_id. Cannot find a suitable key for decryption.')
        if self.metadata['key'].revoked:
            raise CryptoError('Key was revoked and cannot be used for decryption anymore.')

        # Initialize cipher
        try:
            if self.metadata['key'].cipher == EncryptionCipher.AES_GCM:
                self.cipher = AES.new(
                    mode=AES.MODE_GCM,
                    key=self.metadata['key'].key,
                    nonce=self.metadata['nonce']
                )
            else:
                raise CryptoError('Unsupported cipher')
        except Exception as ex:
            raise CryptoError('Error initializing cipher') from ex

        # Add header to auth tag calculation (matches EncryptionStream)
        header = MAGIC + metadata_buffer + b'\x00'
        self.header_len = len(header)
        self.cipher.update(header)

        # Pre-fill the sliding buffer with the first auth_tag_len bytes so read()
        # always withholds the potential trailing tag.
        self.auth_tag_buffer.extend(readexact(self.fileobj, self.auth_tag_len))

    def read(self, size=-1):
        # Decrypt data (except auth tag at end of stream): append the new bytes
        # to the sliding buffer and only decrypt what is safely not the tag.
        self.auth_tag_buffer.extend(self.fileobj.read(size))
        res = self.auth_tag_buffer[:-self.auth_tag_len]
        del self.auth_tag_buffer[:-self.auth_tag_len]
        return self.cipher.decrypt(res)

    def readinto(self, buf) -> int:
        val = self.read(len(buf))
        buf[:len(val)] = val
        return len(val)

    def tell(self) -> int:
        # Position within the plaintext: file position minus header and withheld bytes
        return self.fileobj.tell() - self.header_len - len(self.auth_tag_buffer)

    def seek(self, offset: int, whence=io.SEEK_SET) -> int:
        if not self.seekable():
            raise io.UnsupportedOperation()

        if whence not in [io.SEEK_SET, io.SEEK_END]:
            return self.tell()

        # AEAD cipher modes support only linear decryption, no seeking.
        # In order to be able to change the position, we first verify the auth tag
        # to ensure that the ciphertext was not modified.
        self._verify_auth_tag()
        self.auth_tag_buffer.clear()

        # Then seek to the desired position in the file
        if whence == io.SEEK_SET:
            pos_absolute = self.fileobj.seek(offset + self.header_len, whence)
        elif whence == io.SEEK_END:
            pos_absolute = self.fileobj.seek(0, whence)
            pos_absolute = self.fileobj.seek(pos_absolute - self.auth_tag_len, io.SEEK_SET)

        # Align position in ciphertext to cipher blocks
        pos_in_ciphertext = pos_absolute - self.header_len
        num_blocks_skip = pos_in_ciphertext // self.cipher.block_size
        align_block_skip = pos_in_ciphertext % self.cipher.block_size
        self.fileobj.seek(pos_absolute - align_block_skip, io.SEEK_SET)

        # Then we can use a regular CTR mode for decryption. CTR mode supports
        # encrypting/decrypting arbitrary blocks. We need to initialize the CTR
        # mode with the correct nonce/IV, calculated the same way as for GCM.
        self.cipher = self._init_seek_cipher_aes_gcm(key=self.metadata['key'].key, nonce=self.metadata['nonce'], skip_blocks=num_blocks_skip)

        # Finally move from the block boundary to the final position
        self.auth_tag_buffer.clear()
        self.auth_tag_buffer.extend(readexact(self.fileobj, self.auth_tag_len))
        self.read(align_block_skip)

        return self.tell()

    def _init_seek_cipher_aes_gcm(self, key, nonce, skip_blocks):
        """
        Initialized a new AES CTR cipher at a given block offset.
        Counter calculation is compatible with AES GCM.
        GCM CTR cipher initialized code is taken from Cryptodome.Cipher._mode_gcm.GcmMode.__init__
        """
        # Step 1 in SP800-38D, Algorithm 4 (encryption) - Compute H
        # See also Algorithm 5 (decryption)
        hash_subkey = AES.new(mode=AES.MODE_ECB, key=key).encrypt(b'\x00' * 16)

        # Step 2 - Compute J0
        if len(nonce) == 12:
            j0 = nonce + b"\x00\x00\x00\x01"
        else:
            fill = (16 - (len(nonce) % 16)) % 16 + 8
            ghash_in = (nonce +
                        b'\x00' * fill +
                        long_to_bytes(8 * len(nonce), 8))
            j0 = _GHASH(hash_subkey, _ghash_clmul or _ghash_portable).update(ghash_in).digest()

        # Step 3 - Prepare GCTR cipher for encryption/decryption
        # (only the low 32 bits of J0 form the incrementing counter field)
        nonce_ctr = j0[:12]
        iv_ctr = (bytes_to_long(j0) + 1 + skip_blocks) & 0xFFFFFFFF
        return AES.new(
            mode=AES.MODE_CTR,
            key=key,
            initial_value=iv_ctr,
            nonce=nonce_ctr)

    def _verify_auth_tag(self):
        """Consume the remaining ciphertext and verify the trailing GCM auth tag once."""
        if self.auth_tag_verified:
            return

        try:
            # Read everything to update the internal auth tag calculation
            while _ := self.read():
                pass

            self.cipher.verify(self.auth_tag_buffer)
            self.auth_tag_verified = True
        except Exception as ex:
            raise CryptoError('Auth tag verification failed') from ex

    def close(self):
        try:
            # Fail loudly on close if the ciphertext was tampered with
            self._verify_auth_tag()
        finally:
            super().close()
|
||||
|
||||
|
|
@ -0,0 +1,124 @@
|
|||
import io
|
||||
import json
|
||||
|
||||
import elasticapm
|
||||
from django.db import models
|
||||
from django.core import checks
|
||||
|
||||
from reportcreator_api.archive.crypto import base as crypto
|
||||
|
||||
|
||||
class EncryptedField(models.BinaryField):
    """
    Model field that stores another field's value encrypted in a BinaryField.

    Values are serialized via the wrapped *base_field*, encrypted with
    crypto.open() on write, and decrypted + deserialized on read. Python-side
    behavior (validation, forms, defaults) is delegated to the base field.
    """

    def __init__(self, base_field, editable=True, *args, **kwargs) -> None:
        # The wrapped field defines the Python-side type and (de)serialization
        self.base_field = base_field
        super().__init__(editable=editable, *args, **kwargs)

    @property
    def model(self):
        try:
            return self.__dict__["model"]
        except KeyError:
            raise AttributeError(
                "'%s' object has no attribute 'model'" % self.__class__.__name__
            )

    @model.setter
    def model(self, model):
        # Keep the wrapped field's model in sync with ours
        self.__dict__["model"] = model
        self.base_field.model = model

    def check(self, **kwargs):
        """Run system checks for this field and surface the base field's errors."""
        errors = super().check(**kwargs)
        if self.base_field.remote_field:
            errors.append(
                checks.Error(
                    "Base field for EncryptedField cannot be a related field.",
                    obj=self,
                )
            )
        else:
            # Remove the field name checks as they are not needed here.
            base_errors = self.base_field.check()
            if base_errors:
                messages = "\n    ".join(
                    "%s (%s)" % (error.msg, error.id) for error in base_errors
                )
                errors.append(
                    checks.Error(
                        "Base field for EncryptedField has errors:\n    %s" % messages,
                        obj=self,
                    )
                )
        return errors

    def set_attributes_from_name(self, name):
        super().set_attributes_from_name(name)
        # Propagate the attribute name so the base field reports errors correctly
        self.base_field.set_attributes_from_name(name)

    @property
    def description(self):
        return 'Encrypted ' + self.base_field.description

    def deconstruct(self):
        # Include the base field in migrations state
        name, path, args, kwargs = super().deconstruct()
        kwargs.update({
            "base_field": self.base_field.clone(),
        })
        return name, path, args, kwargs

    @elasticapm.capture_span()
    def get_db_prep_value(self, value, connection, prepared=False):
        """Serialize *value* via the base field, encrypt it, and store as binary."""
        if value is None:
            return value

        # Normalize the Python value to bytes before encryption
        if isinstance(self.base_field, models.JSONField):
            value = json.dumps(value, cls=self.base_field.encoder).encode()
        elif isinstance(self.base_field, models.BinaryField):
            pass
        else:
            value = self.base_field.get_db_prep_value(value=value, connection=connection, prepared=prepared)
            if isinstance(value, bytes):
                pass
            elif isinstance(value, str):
                value = value.encode()
            else:
                value = str(value).encode()

        # Encrypt with the default key from settings (crypto.open in 'wb' mode)
        enc = io.BytesIO()
        with crypto.open(fileobj=enc, mode='wb') as c:
            c.write(value)
        value = enc.getvalue()

        return super().get_db_prep_value(value=value, connection=connection, prepared=prepared)

    @elasticapm.capture_span()
    def from_db_value(self, value, expression, connection):
        """Decrypt the stored binary value and deserialize it via the base field."""
        value = super().to_python(value)

        if isinstance(value, (bytes, memoryview)):
            with crypto.open(fileobj=io.BytesIO(value), mode='rb') as c:
                value = crypto.readall(c)
            # Non-binary base fields expect text after decryption
            if not isinstance(self.base_field, models.BinaryField):
                value = value.decode()
        if hasattr(self.base_field, 'from_db_value'):
            value = self.base_field.from_db_value(value=value, expression=expression, connection=connection)
        return self.base_field.to_python(value)

    def to_python(self, value):
        return self.base_field.to_python(value)

    def value_to_string(self, obj):
        return self.base_field.value_to_string(obj)

    def value_from_object(self, obj):
        return self.base_field.value_from_object(obj)

    def formfield(self, **kwargs):
        return self.base_field.formfield(**kwargs)

    def has_default(self) -> bool:
        return self.base_field.has_default()

    def get_default(self):
        return self.base_field.get_default()
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
from contextlib import contextmanager
|
||||
import tempfile
|
||||
import gnupg
|
||||
|
||||
from reportcreator_api.archive.crypto.base import CryptoError
|
||||
|
||||
|
||||
@contextmanager
def create_gpg():
    """Yield a throwaway GPG instance whose home directory is deleted afterwards."""
    with tempfile.TemporaryDirectory() as home:
        instance = gnupg.GPG(gnupghome=home)
        instance.encoding = 'utf-8'
        yield instance
|
||||
|
||||
|
||||
def public_key_info(public_key: str):
    """
    Validate an ASCII-armored public key for use in project archiving and
    return the gnupg key info dict.

    Raises CryptoError when the input is not exactly one public key with an
    encryption-capable subkey of an accepted algorithm and key size.
    """
    if not public_key:
        raise CryptoError('No public key provided')

    with create_gpg() as gpg:
        # scan_keys() only reads from a file, so write the key to a temp file first
        with tempfile.NamedTemporaryFile(mode='w') as f:
            f.write(public_key)
            f.flush()
            res = gpg.scan_keys(f.name)
            if len(res) == 0:
                raise CryptoError('Invalid public key format')
            if len(res) != 1:
                raise CryptoError('Only 1 public key allowed')
            key_info = res[0]

        if key_info.get('type') != 'pub':
            raise CryptoError('Not a public key')
        # Require a subkey with encryption capability (cap == 'e')
        encryption_key_info = next(filter(lambda s: s.get('type') == 'sub' and s.get('cap') == 'e', key_info['subkey_info'].values()), None)
        if not encryption_key_info:
            raise CryptoError('No encryption key provided')

        # Allowed encryption ciphers: RSA, ECDH, ElGamal with min. key size
        # (algo ids per OpenPGP/RFC 4880: 1=RSA, 2=RSA encrypt-only, 16=Elgamal, 18=ECDH)
        if encryption_key_info['algo'] not in ['1', '2', '16', '18']:
            raise CryptoError('Unsupported algorithm')
        if encryption_key_info['algo'] in ['1', '2', '16'] and int(encryption_key_info['length']) < 3072:
            raise CryptoError('Key length too short. The minimum supported RSA key size is 3072 bit')
        elif encryption_key_info['algo'] in ['18'] and int(encryption_key_info['length']) < 256:
            raise CryptoError('Key length too short. The minimum supported Elliptic Curve size is 256 bit')

        return key_info
|
||||
|
||||
|
||||
def encrypt(data: bytes, public_key: str):
    """
    Encrypt ``data`` for the holder of ``public_key``.

    Returns the ASCII-armored ciphertext as a string.
    Raises CryptoError when GPG reports a failure.
    """
    with create_gpg() as gpg:
        import_result = gpg.import_keys(public_key)
        fingerprint = import_result.results[0]['fingerprint']
        encrypted = gpg.encrypt(data=data, recipients=[fingerprint], always_trust=True)
        if not encrypted.ok:
            raise CryptoError('Encryption failed')
        return encrypted.data.decode()
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
from Cryptodome.Protocol.SecretSharing import Shamir
|
||||
|
||||
|
||||
SHAMIR_BLOCK_SIZE = 16
|
||||
|
||||
|
||||
class ShamirLarge(Shamir):
    """
    Shamir's secret sharing scheme with support for secrets larger than 128 bit.
    Code taken from unmerged PR: https://github.com/Legrandin/pycryptodome/pull/593/files
    """

    @staticmethod
    def split_large(k, n, secret, ssss=False):
        """
        Wrapper for Shamir.split()
        when len(key) > SHAMIR_BLOCK_SIZE (16)

        The secret is split per 16-byte block; share i is the concatenation
        of its per-block shares, returned as (index, bytes) with index 1..n.
        """
        if not isinstance(secret, bytes):
            raise TypeError("Secret must be bytes")
        if len(secret) % SHAMIR_BLOCK_SIZE != 0:
            raise ValueError(f"Secret size must be a multiple of {SHAMIR_BLOCK_SIZE}")

        blocks = len(secret) // SHAMIR_BLOCK_SIZE
        shares = [b'' for _ in range(n)]
        for i in range(blocks):
            block_shares = Shamir.split(k, n,
                secret[i*SHAMIR_BLOCK_SIZE:(i+1)*SHAMIR_BLOCK_SIZE], ssss)
            for j in range(n):
                # block_shares[j] is (index, share_bytes); indices are stable
                # across blocks, so concatenation per holder is safe.
                shares[j] += block_shares[j][1]
        return [(i+1,shares[i]) for i in range(n)]

    @staticmethod
    def combine_large(shares, ssss=False):
        """
        Wrapper for Shamir.combine()
        when len(key) > SHAMIR_BLOCK_SIZE (16)

        Expects shares as (index, bytes) pairs of equal length; recombines
        the secret block by block.
        """
        share_len = len(shares[0][1])
        for share in shares:
            if len(share[1]) % SHAMIR_BLOCK_SIZE:
                raise ValueError(f"Share #{share[0]} is not a multiple of {SHAMIR_BLOCK_SIZE}")
            if len(share[1]) != share_len:
                raise ValueError("Share sizes are inconsistent")
        blocks = share_len // SHAMIR_BLOCK_SIZE
        result = b''
        for i in range(blocks):
            # Slice the matching 16-byte segment out of every share.
            block_shares = [
                (int(idx), share[i*SHAMIR_BLOCK_SIZE:(i+1)*SHAMIR_BLOCK_SIZE])
                for idx, share in shares]
            result += Shamir.combine(block_shares, ssss)
        return result
|
|
@ -0,0 +1,71 @@
|
|||
import io
|
||||
from typing import Iterator
|
||||
from uuid import uuid4
|
||||
from django.core.files import File
|
||||
from reportcreator_api.archive.crypto import base as crypto
|
||||
|
||||
|
||||
class IterableToFileAdapter(File):
    """
    Read-only, file-like Django File wrapping an iterable of byte chunks.

    Chunks are pulled from the iterator lazily and buffered, so arbitrarily
    large streams can be consumed without materializing them in memory.
    """

    def __init__(self, iterable, name=None) -> None:
        super().__init__(file=None, name=name)
        self.iterator = iter(iterable)
        self.buffer = b''

    def read(self, size=-1):
        """
        Read up to ``size`` bytes; a negative or ``None`` size reads everything.

        Returns fewer than ``size`` bytes only when the underlying iterator
        is exhausted.
        """
        read_all = size is None or size < 0
        while read_all or len(self.buffer) < size:
            try:
                self.buffer += next(self.iterator)
            except StopIteration:
                break

        if read_all:
            # Bug fix: the previous `self.buffer[:size]` with size == -1
            # silently dropped the last byte of the stream.
            out, self.buffer = self.buffer, b''
        else:
            out, self.buffer = self.buffer[:size], self.buffer[size:]
        return out

    @property
    def closed(self) -> bool:
        # Never report closed: consumers may probe this between reads.
        return False

    def seekable(self) -> bool:
        return False
|
||||
|
||||
|
||||
class EncryptedFileAdapter(File):
    """
    Django File wrapper that transparently encrypts the wrapped file's
    content while it is being read, chunk by chunk (streaming).
    """

    def __init__(self, file, name=None, **kwargs) -> None:
        # kwargs are forwarded to crypto.open() for each encryption pass.
        self._original_file = file
        self._crypto_kwargs = kwargs
        super().__init__(IterableToFileAdapter(self._encrypted_chunks(file), name or file.name))

    def _encrypted_chunks(self, file, chunk_size=None):
        # Encrypt into an in-memory buffer and drain it after every write so
        # only one plaintext chunk's worth of ciphertext is held at a time.
        buf = io.BytesIO()
        with crypto.open(fileobj=buf, mode='wb', **self._crypto_kwargs) as c:
            for b in file.chunks(chunk_size=chunk_size):
                c.write(b)
                yield buf.getvalue()
                buf.truncate(0)
                buf.seek(0)
        # Final chunk: whatever the crypto stream flushes on close
        # (e.g. trailer/auth data).
        yield buf.getvalue()

    def chunks(self, chunk_size=None) -> Iterator[bytes]:
        # Re-encrypts from the original file on every call.
        return self._encrypted_chunks(self._original_file, chunk_size)
|
||||
|
||||
|
||||
class EncryptedStorageMixin:
    """
    Storage mixin that encrypts file contents at rest and stores every file
    under a random UUID name, hiding the original filename.
    """

    def open(self, name, mode='rb', **kwargs):
        # Wrap the underlying file in a decrypting/encrypting crypto stream.
        return File(file=crypto.open(fileobj=super().open(name=name, mode=mode, **kwargs), mode=mode), name=name)

    def save(self, name, content, max_length=None):
        # The caller-supplied name is discarded; a random UUID prevents
        # filename leaks into the storage backend.
        return super().save(name=str(uuid4()), content=EncryptedFileAdapter(file=File(content)), max_length=max_length)

    def size(self, name):
        # Report the plaintext size: stored size minus crypto header and
        # auth tag, when the crypto stream exposes those lengths.
        size = super().size(name)
        with crypto.open(fileobj=super().open(name=name, mode='rb'), mode='r') as c:
            if hasattr(c, 'header_len') and hasattr(c, 'auth_tag_len'):
                size -= c.header_len + c.auth_tag_len
        return size

    def get_available_name(self, name, max_length=None):
        # NOTE(review): max_length is intentionally ignored — UUID-based
        # names have a fixed short length; confirm against storage backend.
        randname = str(uuid4())
        # Shard files into subdirectories by the first two hex characters.
        randname_with_dir = randname[:2] + '/' + randname[2:]
        return super().get_available_name(name=randname_with_dir, max_length=None)
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
from .import_export import export_project_types, export_projects, export_templates, \
|
||||
import_project_types, import_projects, import_templates
|
||||
|
||||
|
||||
# Public API of the import/export package.
__all__ = [
    'export_project_types', 'export_projects', 'export_templates',
    'import_project_types', 'import_projects', 'import_templates',
]
|
|
@ -0,0 +1,186 @@
|
|||
import copy
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import tarfile
|
||||
from typing import Iterable, Type
|
||||
from django.http import FileResponse
|
||||
from rest_framework import serializers
|
||||
from django.db import transaction
|
||||
from django.db.models import prefetch_related_objects, Prefetch
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
|
||||
from reportcreator_api.archive.import_export.serializers import FindingTemplateExportImportSerializer, PentestProjectExportImportSerializer, ProjectTypeExportImportSerializer
|
||||
from reportcreator_api.pentests.models import FindingTemplate, NotebookPage, PentestFinding, PentestProject, ProjectMemberInfo, ProjectType, ReportSection
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
BLOCKSIZE = FileResponse.block_size
|
||||
|
||||
|
||||
def build_tarinfo(name, size):
    """Create a TarInfo entry with the given member name and payload size."""
    tarinfo = tarfile.TarInfo(name=name)
    tarinfo.size = size
    return tarinfo
|
||||
|
||||
|
||||
def _yield_chunks(buffer: io.BytesIO, last_chunk=False):
    """
    Split buffer in chunks of BLOCKSIZE and yield them.
    Removes the returned chunks from the buffer.
    If last_chunk=True, yield the final chunk even if it is less than BLOCKSIZE;
    otherwise the partial tail is written back into the buffer.
    """
    val = buffer.getvalue()
    buffer.truncate(0)
    buffer.seek(0)

    num_chunks, len_remaining = divmod(len(val), BLOCKSIZE)
    for i in range(num_chunks):
        yield val[i * BLOCKSIZE:(i + 1) * BLOCKSIZE]

    if len_remaining > 0:
        remaining = val[-len_remaining:]
        if last_chunk:
            yield remaining
        else:
            # Keep the partial chunk buffered until more data arrives.
            buffer.write(remaining)
|
||||
|
||||
|
||||
def _tarfile_addfile(buffer, archive: tarfile.TarFile, tarinfo, file_chunks) -> Iterable[bytes]:
    """
    Re-implementation of TarFile.addfile() that yields chunks to integrate into Django StreamingHttpResponse

    ``buffer`` must be the BytesIO the archive writes into; everything the
    archive produces is drained from it and yielded in BLOCKSIZE pieces.
    """
    archive._check("awx")  # tarfile-internal: assert archive is open for writing

    tarinfo = copy.copy(tarinfo)

    # Write the tar member header.
    buf = tarinfo.tobuf(archive.format, archive.encoding, archive.errors)
    archive.fileobj.write(buf)
    archive.offset += len(buf)

    # re-implemented copyfileobj with yield after each block
    for chunk in file_chunks:
        archive.fileobj.write(chunk)
        yield from _yield_chunks(buffer)

    # Pad the member with NUL bytes to a multiple of the tar block size.
    blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE)
    if remainder > 0:
        archive.fileobj.write(tarfile.NUL * (tarfile.BLOCKSIZE - remainder))
        blocks += 1
    archive.offset += blocks * tarfile.BLOCKSIZE
    yield from _yield_chunks(buffer)

    archive.members.append(tarinfo)
|
||||
|
||||
|
||||
def export_archive_iter(data, serializer_class: Type[serializers.Serializer], context=None) -> Iterable[bytes]:
    """
    Export objects as a streamed gzip-compressed tar archive.

    For every object, a ``<id>.json`` member (from serializer.export()) plus
    any files reported by serializer.export_files() are added to the archive.
    Yields the archive bytes chunk by chunk for use in StreamingHttpResponse.
    """
    try:
        buffer = io.BytesIO()

        with tarfile.open(fileobj=buffer, mode='w|gz') as archive:
            context = (context or {}) | {
                'archive': archive,
            }
            for obj in data:
                serializer = serializer_class(instance=obj, context=context)
                # Use a dedicated name: the original reassigned `data` here,
                # shadowing the input iterable.
                exported = serializer.export()
                archive_data = json.dumps(exported, cls=DjangoJSONEncoder).encode()
                yield from _tarfile_addfile(
                    buffer=buffer,
                    archive=archive,
                    tarinfo=build_tarinfo(name=f'{obj.id}.json', size=len(archive_data)),
                    file_chunks=[archive_data]
                )

                for name, file in serializer.export_files():
                    yield from _tarfile_addfile(
                        buffer=buffer,
                        archive=archive,
                        tarinfo=build_tarinfo(name=name, size=file.size),
                        file_chunks=file.chunks()
                    )

        yield from _yield_chunks(buffer=buffer, last_chunk=True)
    except Exception:
        # Use the module logger (was the root logger) and re-raise with the
        # original traceback.
        log.exception('Error while exporting archive')
        raise
|
||||
|
||||
|
||||
@transaction.atomic()
def import_archive(archive_file, serializer_class: Type[serializers.Serializer]):
    """
    Import objects from a tar archive created by export_archive_iter().

    Top-level ``*.json`` members are deserialized and imported via the given
    serializer class. On any error, files already written to storage are
    deleted and the DB transaction is rolled back (atomic decorator).
    Returns the list of imported model instances.
    """
    context = {
        'archive': None,
        'storage_files': [],
    }

    try:
        # We cannot use the streaming mode for import, because random access is required for importing files referenced in JSON
        # However, the tarfile library does not load everything into memory at once, only the archive member metadata (e.g. filename)
        # File contents are loaded only when reading them, but file reading can be streamed
        with tarfile.open(fileobj=archive_file, mode='r') as archive:
            context['archive'] = archive

            # Get JSON files to import: only top-level *.json members count
            to_import = []
            for m in archive.getmembers():
                mp = Path(m.name)
                if m.isfile() and mp.match('*.json') and not mp.parent.parts:
                    to_import.append(m.name)

            # Perform import
            # The actual work is performed in serializers
            imported_objects = []
            for m in to_import:
                serializer = serializer_class(data=json.load(archive.extractfile(m)), context=context)
                serializer.is_valid(raise_exception=True)
                obj = serializer.perform_import()
                log.info(f'Imported object {obj=} {obj.id=}')
                imported_objects.append(obj)

            return imported_objects
    except Exception:
        # Rollback partially imported data. DB rollback is done in the decorator
        log.exception('Error while importing archive. Rolling back import.')

        for f in context.get('storage_files', []):
            try:
                f.delete()
            except Exception:
                log.exception(f'Failed to delete imported file "{f.name}" during rollback')
        # Bare raise preserves the original traceback (was `raise ex`).
        raise
|
||||
|
||||
|
||||
def export_templates(data: Iterable[FindingTemplate]):
    """Export finding templates as a streamed tar.gz archive."""
    serializer_class = FindingTemplateExportImportSerializer
    return export_archive_iter(data, serializer_class=serializer_class)
|
||||
|
||||
def export_project_types(data: Iterable[ProjectType]):
    """Export designs (including their assets) as a streamed tar.gz archive."""
    serializer_class = ProjectTypeExportImportSerializer
    # Avoid per-design asset queries during export.
    prefetch_related_objects(data, 'assets')
    return export_archive_iter(data, serializer_class=serializer_class)
|
||||
|
||||
def export_projects(data: Iterable[PentestProject], export_all=False):
    """
    Export projects as a streamed tar.gz archive.

    With export_all=True, notes and project files are included as well.
    """
    # Prefetch all related objects touched by the serializers up front.
    prefetch_related_objects(
        data,
        Prefetch('findings', PentestFinding.objects.select_related('assignee')),
        Prefetch('sections', ReportSection.objects.select_related('assignee')),
        Prefetch('notes', NotebookPage.objects.select_related('parent')),
        Prefetch('members', ProjectMemberInfo.objects.select_related('user')),
        'images',
        'project_type__assets',
    )
    export_context = {
        'export_all': export_all,
    }
    return export_archive_iter(data, serializer_class=PentestProjectExportImportSerializer, context=export_context)
|
||||
|
||||
|
||||
def import_templates(archive_file):
    """Import finding templates from an exported archive."""
    serializer_class = FindingTemplateExportImportSerializer
    return import_archive(archive_file, serializer_class=serializer_class)
|
||||
|
||||
def import_project_types(archive_file):
    """Import designs from an exported archive."""
    serializer_class = ProjectTypeExportImportSerializer
    return import_archive(archive_file, serializer_class=serializer_class)
|
||||
|
||||
def import_projects(archive_file):
    """Import projects from an exported archive."""
    serializer_class = PentestProjectExportImportSerializer
    return import_archive(archive_file, serializer_class=serializer_class)
|
||||
|
|
@ -0,0 +1,405 @@
|
|||
from typing import Iterable
|
||||
from django.core.files import File
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from rest_framework import serializers
|
||||
from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure
|
||||
|
||||
from reportcreator_api.pentests.models import FindingTemplate, NotebookPage, PentestFinding, PentestProject, ProjectType, ReportSection, \
|
||||
SourceEnum, UploadedAsset, UploadedImage, UploadedFileBase, ProjectMemberInfo, UploadedProjectFile
|
||||
from reportcreator_api.pentests.serializers import ProjectMemberInfoSerializer
|
||||
from reportcreator_api.users.models import PentestUser
|
||||
from reportcreator_api.users.serializers import RelatedUserSerializer
|
||||
from reportcreator_api.utils.utils import omit_keys
|
||||
|
||||
|
||||
class ExportImportSerializer(serializers.ModelSerializer):
    """Base serializer for archive export/import of model instances."""

    def perform_import(self):
        # Copy validated_data because create() pops entries from it.
        return self.create(self.validated_data.copy())

    def export(self):
        """Return the JSON-serializable representation written to the archive."""
        return self.data

    def export_files(self) -> Iterable[tuple[str, File]]:
        """Yield (path-in-archive, file) pairs; default: no files."""
        return []
|
||||
|
||||
|
||||
class FormatField(serializers.Field):
    """
    Pseudo-field that renders a fixed format identifier on export and
    validates that imported data declares exactly the same identifier.
    """

    def __init__(self, format):
        self.format = format
        self.default_validators = [self._validate_format]
        super().__init__()

    def _validate_format(self, v):
        if v != self.format:
            raise serializers.ValidationError(f'Invalid format: expected "{self.format}" got "{v}"')
        else:
            # Valid: skip the field so the marker never lands in validated_data.
            raise serializers.SkipField()

    def get_attribute(self, instance):
        # The value never comes from the instance; always emit the fixed marker.
        return self.format

    def to_representation(self, value):
        return value

    def to_internal_value(self, value):
        return value
|
||||
|
||||
|
||||
class UserIdSerializer(serializers.ModelSerializer):
    """Serializes a user as just their ID (for references inside archives)."""

    class Meta:
        model = PentestUser
        fields = ['id']
|
||||
|
||||
|
||||
class RelatedUserIdExportImportSerializer(RelatedUserSerializer):
    """User reference (by ID) that is skipped when the user does not exist locally."""

    def __init__(self, **kwargs):
        super().__init__(user_serializer=UserIdSerializer, **{'required': False, 'allow_null': True, 'default': None} | kwargs)

    def to_internal_value(self, data):
        try:
            return super().to_internal_value(data)
        except PentestUser.DoesNotExist:
            # If user does not exist: ignore
            raise serializers.SkipField()
|
||||
|
||||
|
||||
class UserDataSerializer(serializers.ModelSerializer):
    """Full user contact data embedded in exports (e.g. project members)."""

    class Meta:
        model = PentestUser
        fields = [
            'id', 'email', 'phone', 'mobile',
            'name', 'title_before', 'first_name', 'middle_name', 'last_name', 'title_after',
        ]
        # id is writable so imported member data keeps the original UUIDs.
        extra_kwargs = {'id': {'read_only': False}}
|
||||
|
||||
|
||||
class RelatedUserDataExportImportSerializer(ProjectMemberInfoSerializer):
    """Project member entry; falls back to the raw dict when the user is unknown."""

    def __init__(self, **kwargs):
        super().__init__(user_serializer=UserDataSerializer, **kwargs)

    def to_internal_value(self, data):
        try:
            return ProjectMemberInfo(**super().to_internal_value(data))
        except PentestUser.DoesNotExist:
            # Keep the plain dict so it can be stored as an "imported member".
            return data
|
||||
|
||||
|
||||
class ProjectMemberListExportImportSerializer(serializers.ListSerializer):
    """Member list combining current members with previously imported ones."""

    child = RelatedUserDataExportImportSerializer()

    def to_representation(self, project):
        return super().to_representation(project.members.all()) + project.imported_members

    def to_internal_value(self, data):
        # Nest under the field name so the parent serializer can pop it.
        return {self.field_name: super().to_internal_value(data)}
|
||||
|
||||
|
||||
class OptionalPrimaryKeyRelatedField(serializers.PrimaryKeyRelatedField):
    """PK reference that is skipped entirely when null or not resolvable."""

    def __init__(self, **kwargs):
        super().__init__(**{'required': False, 'allow_null': True, 'default': None} | kwargs)

    def to_internal_value(self, data):
        if data is None:
            raise serializers.SkipField()
        try:
            return self.get_queryset().get(pk=data)
        except ObjectDoesNotExist:
            # Referenced object missing on this instance: ignore the reference.
            raise serializers.SkipField()
|
||||
|
||||
|
||||
class FindingTemplateExportImportSerializer(ExportImportSerializer):
    """Export/import serializer for finding templates (format "templates/v1")."""

    format = FormatField('templates/v1')

    data = serializers.DictField(source='data_all')

    class Meta:
        model = FindingTemplate
        fields = ['format', 'id', 'created', 'updated', 'tags', 'language', 'status', 'data']
        extra_kwargs = {'id': {'read_only': True}, 'created': {'read_only': False}}

    def create(self, validated_data):
        data = validated_data.pop('data_all', {})
        # Imported templates are flagged so they can be told apart from local ones.
        template = FindingTemplate(**{
            'source': SourceEnum.IMPORTED,
        } | validated_data)
        template.update_data(data)
        template.save()
        return template
|
||||
|
||||
|
||||
class FileListExportImportSerializer(serializers.ListSerializer):
    """List serializer that bulk-handles uploaded files of an archive."""

    def export_files(self):
        for e in self.instance:
            self.child.instance = e
            yield from self.child.export_files()

    def extract_file(self, name):
        # Pull the file's content stream out of the opened tar archive.
        return self.context['archive'].extractfile(self.child.get_path_in_archive(name))

    def create(self, validated_data):
        child_model_class = self.child.Meta.model
        objs = [
            child_model_class(**attrs | {
                'name_hash': UploadedFileBase.hash_name(attrs['name']),
                'file': File(
                    file=self.extract_file(attrs['name']),
                    name=attrs['name']),
                'linked_object': self.child.get_linked_object()
            }) for attrs in validated_data]

        child_model_class.objects.bulk_create(objs)
        # Track stored files so a failed import can delete them on rollback.
        self.context['storage_files'].extend(map(lambda o: o.file, objs))
        return objs
|
||||
|
||||
|
||||
class FileExportImportSerializer(ExportImportSerializer):
    """Base serializer for a single uploaded file referenced by an archive."""

    class Meta:
        fields = ['id', 'created', 'updated', 'name']
        extra_kwargs = {'id': {'read_only': True}, 'created': {'read_only': False}}
        list_serializer_class = FileListExportImportSerializer

    def validate_name(self, name):
        # Reject path separators and NUL bytes to prevent path traversal on import.
        if '/' in name or '\\' in name or '\x00' in name:
            raise serializers.ValidationError(f'Invalid filename: {name}')
        return name

    def get_linked_object(self):
        # Implemented by subclasses: the object the file belongs to.
        pass

    def get_path_in_archive(self, name):
        # Implemented by subclasses: where the file is located in the archive.
        pass

    def export_files(self) -> Iterable[tuple[str, File]]:
        yield self.get_path_in_archive(self.instance.name), self.instance.file
|
||||
|
||||
|
||||
class UploadedImageExportImportSerializer(FileExportImportSerializer):
    """Project image; stored in the archive under "<project-id>-images/"."""

    class Meta(FileExportImportSerializer.Meta):
        model = UploadedImage

    def get_linked_object(self):
        return self.context['project']

    def get_path_in_archive(self, name):
        # On import, resolve via the ID of the old project from the archive
        return str(self.context.get('project_id') or self.get_linked_object().id) + '-images/' + name
|
||||
|
||||
|
||||
class UploadedProjectFileExportImportSerializer(FileExportImportSerializer):
    """Project file; stored in the archive under "<project-id>-files/"."""

    class Meta(FileExportImportSerializer.Meta):
        model = UploadedProjectFile

    def get_linked_object(self):
        return self.context['project']

    def get_path_in_archive(self, name):
        # On import, resolve via the ID of the old project from the archive
        return str(self.context.get('project_id') or self.get_linked_object().id) + '-files/' + name
|
||||
|
||||
|
||||
class UploadedAssetExportImportSerializer(FileExportImportSerializer):
    """Design asset; stored in the archive under "<project-type-id>-assets/"."""

    class Meta(FileExportImportSerializer.Meta):
        model = UploadedAsset

    def get_linked_object(self):
        return self.context['project_type']

    def get_path_in_archive(self, name):
        # On import, resolve via the ID of the old project_type from the archive
        return str(self.context.get('project_type_id') or self.get_linked_object().id) + '-assets/' + name
|
||||
|
||||
|
||||
class ProjectTypeExportImportSerializer(ExportImportSerializer):
    """Export/import serializer for designs (format "projecttypes/v1")."""

    format = FormatField('projecttypes/v1')
    assets = UploadedAssetExportImportSerializer(many=True)

    class Meta:
        model = ProjectType
        fields = [
            'format', 'id', 'created', 'updated', 'name', 'language',
            'report_fields', 'report_sections', 'finding_fields', 'finding_field_order',
            'report_template', 'report_styles', 'report_preview_data',
            'assets'
        ]
        extra_kwargs = {'id': {'read_only': False}, 'created': {'read_only': False}}

    def export_files(self) -> Iterable[tuple[str, File]]:
        af = self.fields['assets']
        self.context.update({'project_type': self.instance})
        af.instance = list(af.get_attribute(self.instance).all())
        yield from af.export_files()

    def create(self, validated_data):
        old_id = validated_data.pop('id')
        assets = validated_data.pop('assets', [])
        project_type = super().create({
            'source': SourceEnum.IMPORTED,
        } | validated_data)
        # Remember the archive's original ID so asset paths can be resolved.
        self.context.update({'project_type': project_type, 'project_type_id': old_id})
        self.fields['assets'].create(assets)
        return project_type
|
||||
|
||||
|
||||
class PentestFindingExportImportSerializer(ExportImportSerializer):
    """Finding inside a project export; data is coerced to the design's fields."""

    id = serializers.UUIDField(source='finding_id')
    assignee = RelatedUserIdExportImportSerializer()
    template = OptionalPrimaryKeyRelatedField(queryset=FindingTemplate.objects.all(), source='template_id')
    data = serializers.DictField(source='data_all')

    class Meta:
        model = PentestFinding
        fields = [
            'id', 'created', 'updated', 'assignee', 'status', 'template', 'data',
        ]
        extra_kwargs = {'created': {'read_only': False}}

    def create(self, validated_data):
        project = self.context['project']
        data = validated_data.pop('data_all', {})
        template = validated_data.pop('template_id', None)
        finding = PentestFinding(**{
            'project': project,
            'template_id': template.id if template else None,
        } | validated_data)
        # Align imported data with the project type's field definition:
        # missing fields are filled with None, undefined ones are kept.
        finding.update_data(ensure_defined_structure(
            value=data,
            definition=project.project_type.finding_fields_obj,
            handle_undefined=HandleUndefinedFieldsOptions.FILL_NONE,
            include_undefined=True)
        )
        finding.save()
        return finding
|
||||
|
||||
|
||||
class ReportSectionExportImportSerializer(ExportImportSerializer):
    """Report section metadata (assignee/status); content lives in report_data."""

    id = serializers.CharField(source='section_id')
    assignee = RelatedUserIdExportImportSerializer()

    class Meta:
        model = ReportSection
        fields = [
            'id', 'created', 'updated', 'assignee', 'status',
        ]
        extra_kwargs = {'created': {'read_only': False}}
|
||||
|
||||
|
||||
class NotebookPageExportImportSerializer(ExportImportSerializer):
    """Single project note; the parent is referenced by the parent note's UUID."""

    id = serializers.UUIDField(source='note_id')
    parent = serializers.UUIDField(source='parent.note_id', allow_null=True)

    class Meta:
        model = NotebookPage
        fields = [
            'id', 'created', 'updated',
            'title', 'text', 'checked', 'icon_emoji', 'status_emoji',
            'order', 'parent',
        ]
        extra_kwargs = {
            'created': {'read_only': False},
            # Optional for compatibility with archives created before
            # these fields existed.
            'icon_emoji': {'required': False},
            'status_emoji': {'required': False},
        }
|
||||
|
||||
|
||||
class NotebookPageListExportImportSerializer(serializers.ListSerializer):
    """Creates note trees: instantiate all notes first, then link parents by note_id."""

    child = NotebookPageExportImportSerializer()

    def create(self, validated_data):
        instances = [NotebookPage(project=self.context['project'], **omit_keys(d, ['parent'])) for d in validated_data]
        for i, d in zip(instances, validated_data):
            if d.get('parent'):
                # Resolve the parent among the freshly built instances.
                i.parent = next(filter(lambda e: e.note_id == d.get('parent', {}).get('note_id'), instances), None)

        NotebookPage.objects.check_parent_and_order(instances)
        NotebookPage.objects.bulk_create(instances)
        return instances
|
||||
|
||||
|
||||
class PentestProjectExportImportSerializer(ExportImportSerializer):
    """
    Top-level serializer for project archives (format "projects/v1").

    Exports the project together with its design, members, report data,
    sections, findings and images; notes and files are only included when
    the 'export_all' context flag is set.
    """

    format = FormatField('projects/v1')
    members = ProjectMemberListExportImportSerializer(source='*', required=False)
    # Legacy write-only field name accepted when importing old archives.
    pentesters = ProjectMemberListExportImportSerializer(required=False, write_only=True)
    project_type = ProjectTypeExportImportSerializer()
    report_data = serializers.DictField(source='data_all')
    sections = ReportSectionExportImportSerializer(many=True)
    findings = PentestFindingExportImportSerializer(many=True)
    notes = NotebookPageListExportImportSerializer(required=False)
    images = UploadedImageExportImportSerializer(many=True)
    files = UploadedProjectFileExportImportSerializer(many=True, required=False)

    class Meta:
        model = PentestProject
        fields = [
            'format', 'id', 'created', 'updated', 'name', 'language',
            'members', 'pentesters', 'project_type',
            'report_data', 'sections', 'findings', 'notes', 'images', 'files',
        ]
        extra_kwargs = {'id': {'read_only': False}, 'created': {'read_only': False}}

    def get_fields(self):
        fields = super().get_fields()
        if not self.context.get('export_all', True):
            # Notes and files are excluded from regular (non-full) exports.
            del fields['notes']
            del fields['files']
        return fields

    def export_files(self) -> Iterable[tuple[str, File]]:
        # Design assets first, then project images, then (optionally) files.
        self.fields['project_type'].instance = self.instance.project_type
        yield from self.fields['project_type'].export_files()

        self.context.update({'project': self.instance})

        imgf = self.fields['images']
        imgf.instance = list(imgf.get_attribute(self.instance).all())
        yield from imgf.export_files()

        # 'files' may have been removed by get_fields() for non-full exports.
        if ff := self.fields.get('files'):
            ff.instance = list(ff.get_attribute(self.instance).all())
            yield from ff.export_files()

    def create(self, validated_data):
        old_id = validated_data.pop('id')
        # Prefer 'members'; fall back to the legacy 'pentesters' key.
        members = validated_data.pop('members', validated_data.pop('pentesters', []))
        project_type_data = validated_data.pop('project_type', {})
        sections = validated_data.pop('sections', [])
        findings = validated_data.pop('findings', [])
        notes = validated_data.pop('notes', [])
        report_data = validated_data.pop('data_all', {})
        images_data = validated_data.pop('images', [])
        files_data = validated_data.pop('files', [])

        project_type = self.fields['project_type'].create(project_type_data | {
            'source': SourceEnum.IMPORTED_DEPENDENCY,
        })
        project = super().create(validated_data | {
            'project_type': project_type,
            # Unknown users remain plain dicts and are kept as imported members.
            'imported_members': list(filter(lambda u: isinstance(u, dict), members)),
            'source': SourceEnum.IMPORTED,
            'custom_fields': ensure_defined_structure(
                value=report_data,
                definition=project_type.report_fields_obj,
                handle_undefined=HandleUndefinedFieldsOptions.FILL_NONE,
                include_undefined=True
            ),
        })
        project_type.linked_project = project
        project_type.save()

        member_infos = list(filter(lambda u: isinstance(u, ProjectMemberInfo), members))
        for mi in member_infos:
            mi.project = project
        ProjectMemberInfo.objects.bulk_create(member_infos)

        # Expose the new project and the archive's old project ID so file
        # serializers can resolve archive paths.
        self.context.update({'project': project, 'project_id': old_id})

        # Sections are created together with the project; update matching ones.
        for section in project.sections.all():
            if section_data := next(filter(lambda s: s.get('section_id') == section.section_id, sections), None):
                self.fields['sections'].child.update(section, section_data)

        self.fields['findings'].create(findings)
        self.fields['notes'].create(notes)
        self.fields['images'].create(images_data)
        self.fields['files'].create(files_data)

        return project
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
from django.contrib.admin.apps import AdminConfig as AdminConfigBase
|
||||
from django.contrib.admin.sites import AdminSite as AdminSiteBase
|
||||
|
||||
|
||||
class AdminConfig(AdminConfigBase):
    """Admin app config that installs the project's custom AdminSite."""
    default_site = 'reportcreator_api.conf.admin.AdminSite'
|
||||
|
||||
|
||||
class AdminSite(AdminSiteBase):
    """Admin site restricted to authenticated users with admin rights."""

    def has_permission(self, request):
        """Allow access only for non-anonymous users whose is_admin flag is set."""
        user = request.user
        return user and not user.is_anonymous and user.is_admin
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
"""
|
||||
ASGI config for reportcreator_api project.
|
||||
|
||||
It exposes the ASGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.asgi import get_asgi_application
|
||||
|
||||
# Point Django at the project settings before the application is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings')

# Module-level ASGI callable picked up by the ASGI server.
application = get_asgi_application()
|
|
@ -0,0 +1,29 @@
|
|||
import os
|
||||
from celery import Celery, signals
|
||||
|
||||
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings')
|
||||
|
||||
|
||||
# Celery application for background tasks. A custom worker fixup is
# registered in addition to Celery's built-in ones.
celery_app = Celery(
    'reportcreator',
    fixups=Celery.builtin_fixups | {
        'reportcreator_api.tasks.rendering.celery_worker:SecureWorkerFixup'
    }
)

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
celery_app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django apps.
celery_app.autodiscover_tasks()
|
||||
|
||||
|
||||
@signals.setup_logging.connect()
def setup_logging(*args, **kwargs):
    """Make Celery workers use Django's LOGGING config instead of Celery's defaults."""
    # Imported lazily: Django settings are only available once configured.
    import logging.config
    from django.conf import settings
    logging.config.dictConfig(settings.LOGGING)
|
|
@ -0,0 +1,561 @@
|
|||
"""
|
||||
Django settings for reportcreator_api project.
|
||||
|
||||
Generated by 'django-admin startproject' using Django 4.0.4.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/4.0/topics/settings/
|
||||
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/4.0/ref/settings/
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
from decouple import config
|
||||
from pathlib import Path
|
||||
import json
|
||||
from urllib.parse import urljoin
|
||||
|
||||
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent.parent
# Root directory for uploaded/archived file storage; overridable via env.
MEDIA_ROOT = config('MEDIA_ROOT', default=BASE_DIR / 'data', cast=Path)


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# The insecure default is only a development fallback; deployments must set SECRET_KEY.
SECRET_KEY = config('SECRET_KEY', default='django-insecure-ygvn9(x==kcv#r%pccf4rlzyz7_1v1b83$19&b2lsj6uz$mbro')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', cast=bool, default=False)

# NOTE(review): all hosts allowed — presumably host validation is delegated
# to the reverse proxy in front of the app; confirm for your deployment.
ALLOWED_HOSTS = ['*']
APPEND_SLASH = True

# Application definition

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sessions',

    # Third-party apps
    'rest_framework',
    'django_filters',
    'adrf',

    # Project apps
    'reportcreator_api',
    'reportcreator_api.users',
    'reportcreator_api.pentests',
    'reportcreator_api.notifications',
    'reportcreator_api.tasks',
    'reportcreator_api.conf.admin.AdminConfig',
    'reportcreator_api.api_utils',
]
|
||||
|
||||
# Middleware order matters: WhiteNoise serves static files first; the custom
# request-logging middleware wraps everything after it.
MIDDLEWARE = [
    'whitenoise.middleware.WhiteNoiseMiddleware',

    'reportcreator_api.utils.logging.RequestLoggingMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'reportcreator_api.utils.middleware.ExtendSessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'reportcreator_api.utils.middleware.AdminSessionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'csp.middleware.CSPMiddleware',
    'reportcreator_api.utils.middleware.CacheControlMiddleware',
    'reportcreator_api.utils.middleware.PermissionsPolicyMiddleware',
]

ROOT_URLCONF = 'reportcreator_api.conf.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # The SPA's built index.html lives in the frontend directory.
        'DIRS': [BASE_DIR / 'frontend'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

REST_FRAMEWORK = {
    # Use Django's standard `django.contrib.auth` permissions,
    # or allow read-only access for unauthenticated users.
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.IsAuthenticated',
    ],
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework.authentication.SessionAuthentication',
    ],
    'DEFAULT_THROTTLE_CLASSES': [
        'reportcreator_api.utils.throttling.ScopedUserRateThrottle',
    ],
    # Rate-limit PDF rendering: 3 requests per 10 seconds per user.
    'DEFAULT_THROTTLE_RATES': {
        'pdf': '3/10s',
    },
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.CursorPagination',
    'EXCEPTION_HANDLER': 'reportcreator_api.utils.api.exception_handler',
    'PAGE_SIZE': 100,
    'UNICODE_JSON': False,
}


WSGI_APPLICATION = 'reportcreator_api.conf.wsgi.application'
|
||||
|
||||
|
||||
# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': config('DATABASE_ENGINE', default='django.db.backends.postgresql'),
        'HOST': config('DATABASE_HOST', default=''),
        'PORT': config('DATABASE_PORT', default='5432'),
        'NAME': config('DATABASE_NAME', default=''),
        'USER': config('DATABASE_USER', default=''),
        'PASSWORD': config('DATABASE_PASSWORD', default=''),
        # Server-side cursors break behind transaction-pooling connection
        # poolers such as PgBouncer; disable them.
        'DISABLE_SERVER_SIDE_CURSORS': True,
        'OPTIONS': {
            # Disable prepared statements (also required for PgBouncer compatibility).
            'prepare_threshold': None,
        }
    },
}


# Password validation
# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
|
||||
|
||||
# Login URL of SPA frontend
LOGIN_URL = '/login/'

# Custom session backend (encrypted session storage).
SESSION_ENGINE = 'reportcreator_api.users.backends.session'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# 14-hour session lifetime, expressed in seconds.
SESSION_COOKIE_AGE = timedelta(hours=14).seconds
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SAMESITE = 'Strict'
CSRF_COOKIE_SAMESITE = 'Strict'
# NOTE(review): wildcard trusted origins disable Django's CSRF origin check;
# presumably CSRF protection relies on the SameSite=Strict cookies above — confirm.
CSRF_TRUSTED_ORIGINS = ['https://*', 'http://*']

MFA_SERVER_NAME = config('MFA_SERVER_NAME', default='SysReptor')
# FIDO2 RP ID: the domain name of the instance
MFA_FIDO2_RP_ID = config('MFA_FIDO2_RP_ID', default='')
MFA_LOGIN_TIMEOUT = timedelta(minutes=5)
SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT = timedelta(minutes=15)

# Enable the JSON-compatible WebAuthn data mapping of python-fido2.
import fido2.features
fido2.features.webauthn_json_mapping.enabled = True



# OIDC/OAuth clients for SSO login. Azure AD is configured via dedicated env
# variables; arbitrary providers can be added via OIDC_AUTHLIB_OAUTH_CLIENTS (JSON).
AUTHLIB_OAUTH_CLIENTS = {}
OIDC_AZURE_CLIENT_ID = config('OIDC_AZURE_CLIENT_ID', default=None)
OIDC_AZURE_CLIENT_SECRET = config('OIDC_AZURE_CLIENT_SECRET', default=None)
OIDC_AZURE_TENANT_ID = config('OIDC_AZURE_TENANT_ID', default=None)
if OIDC_AZURE_CLIENT_ID and OIDC_AZURE_CLIENT_SECRET and OIDC_AZURE_TENANT_ID:
    AUTHLIB_OAUTH_CLIENTS |= {
        'azure': {
            'label': 'Azure AD',
            'client_id': OIDC_AZURE_CLIENT_ID,
            'client_secret': OIDC_AZURE_CLIENT_SECRET,
            'server_metadata_url': f'https://login.microsoftonline.com/{OIDC_AZURE_TENANT_ID}/v2.0/.well-known/openid-configuration',
            'client_kwargs': {
                'scope': 'openid email profile',
                'code_challenge_method': 'S256',
            },
        },
    }
if oidc_config := config('OIDC_AUTHLIB_OAUTH_CLIENTS', cast=json.loads, default="{}"):
    AUTHLIB_OAUTH_CLIENTS |= oidc_config
|
||||
|
||||
# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/

MEDIA_URL = 'data/'
STATIC_URL = 'static/'
STATIC_ROOT = BASE_DIR / 'static'
STATICFILES_DIRS = [
    BASE_DIR / 'frontend' / 'static',
]

# Map the short env value ('filesystem' / 's3') to a storage class path; any
# other value is treated as a dotted storage class path and passed through.
UPLOADED_FILE_STORAGE = config('UPLOADED_FILE_STORAGE', default='filesystem')
UPLOADED_FILE_STORAGE = {
    'filesystem': 'reportcreator_api.utils.storages.EncryptedFileSystemStorage',
    's3': 'reportcreator_api.utils.storages.EncryptedS3SystemStorage',
}.get(UPLOADED_FILE_STORAGE, UPLOADED_FILE_STORAGE)

STORAGES = {
    'staticfiles': {
        'BACKEND': 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage',
    },
    # Images and assets always use encrypted filesystem storage.
    'uploaded_images': {
        'BACKEND': 'reportcreator_api.utils.storages.EncryptedFileSystemStorage',
        'OPTIONS': {
            'location': config('UPLOADED_IMAGE_LOCATION', default=MEDIA_ROOT / 'uploadedimages', cast=Path),
        },
    },
    'uploaded_assets': {
        'BACKEND': 'reportcreator_api.utils.storages.EncryptedFileSystemStorage',
        'OPTIONS': {
            'location': config('UPLOADED_ASSET_LOCATION', default=MEDIA_ROOT / 'uploadedassets', cast=Path)
        },
    },
    # Project files may use filesystem or S3; the S3-only options are ignored
    # by the filesystem backend.
    'uploaded_files': {
        'BACKEND': UPLOADED_FILE_STORAGE,
        'OPTIONS': {
            'location': config('UPLOADED_FILE_LOCATION', default=MEDIA_ROOT / 'uploadedfiles', cast=Path),
            'access_key': config('UPLOADED_FILE_S3_ACCESS_KEY', default=''),
            'secret_key': config('UPLOADED_FILE_S3_SECRET_KEY', default=''),
            'security_token': config('UPLOADED_FILE_S3_SESSION_TOKEN', default=None),
            'bucket_name': config('UPLOADED_FILE_S3_BUCKET_NAME', default=''),
            'endpoint_url': config('UPLOADED_FILE_S3_ENDPOINT_URL', default=''),
        },
    },
    # Archives are already encrypted at the application level, so the default
    # storage is unencrypted.
    'archived_files': {
        'BACKEND': config('ARCHIVED_FILE_STORAGE', default='reportcreator_api.utils.storages.UnencryptedFileSystemStorage'),
        'OPTIONS': {
            'location': config('ARCHIVED_FILE_LOCATION', default=MEDIA_ROOT / 'archivedfiles', cast=Path),
            'access_key': config('ARCHIVED_FILE_S3_ACCESS_KEY', default=''),
            'secret_key': config('ARCHIVED_FILE_S3_SECRET_KEY', default=''),
            'security_token': config('ARCHIVED_FILE_S3_SESSION_TOKEN', default=None),
            'bucket_name': config('ARCHIVED_FILE_S3_BUCKET_NAME', default=''),
            'endpoint_url': config('ARCHIVED_FILE_S3_ENDPOINT_URL', default=''),
        },
    },
}

# Enable HEIF/HEIC image support in Pillow.
from pillow_heif import register_heif_opener
register_heif_opener()
|
||||
|
||||
|
||||
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
AUTH_USER_MODEL = 'users.PentestUser'


# HTTP Header settings
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_CROSS_ORIGIN_OPENER_POLICY = 'same-origin'
SECURE_REFERRER_POLICY = 'same-origin'
X_FRAME_OPTIONS = 'SAMEORIGIN'

# Content-Security-Policy: deny by default, allow same-origin per directive.
CSP_DEFAULT_SRC = ["'none'"]
CSP_IMG_SRC = ["'self'", "data:"]
CSP_FONT_SRC = ["'self'"]
CSP_WORKER_SRC = ["'self'"]
CSP_CONNECT_SRC = ["'self'"]
CSP_FRAME_SRC = ["'self'"]
CSP_FRAME_ANCESTORS = ["'self'"]
# nuxt, vuetify and markdown preview use inline styles
CSP_STYLE_SRC = ["'self'", "'unsafe-inline'"]
# unsafe-inline:
#   Django Rest Framework inserts the CSRF token via an inline script. DRF will be CSP-compliant in version 3.15 (see https://github.com/encode/django-rest-framework/pull/8784)
#   NuxtJS injects a inline script in index.html
# unsafe-eval:
#   Used by nuxt-vuex-localstorage; PR exists, but maintainer is not very active (see https://github.com/rubystarashe/nuxt-vuex-localstorage/issues/37)
CSP_SCRIPT_SRC = ["'self'", "'unsafe-inline'", "'unsafe-eval'"]

# Permissions-Policy: only WebAuthn and clipboard-write are allowed (same
# origin); every other browser feature is disabled.
PERMISSIONS_POLICY = {
    'publickey-credentials-get': '(self)',
    'clipboard-write': '(self)',
    'accelerometer': '()',
    'ambient-light-sensor': '()',
    'autoplay': '()',
    'battery': '()',
    'camera': '()',
    'cross-origin-isolated': '()',
    'display-capture': '()',
    'document-domain': '()',
    'encrypted-media': '()',
    'execution-while-not-rendered': '()',
    'execution-while-out-of-viewport': '()',
    'fullscreen': '()',
    'geolocation': '()',
    'gyroscope': '()',
    'keyboard-map': '()',
    'magnetometer': '()',
    'microphone': '()',
    'midi': '()',
    'navigation-override': '()',
    'payment': '()',
    'picture-in-picture': '()',
    'screen-wake-lock': '()',
    'sync-xhr': '()',
    'usb': '()',
    'web-share': '()',
    'xr-spatial-tracking': '()',
    'clipboard-read': '()',
    'gamepad': '()',
    'speaker-selection': '()',
}
|
||||
|
||||
|
||||
# Generate HTTPS URIs in responses for requests behind a reverse proxy
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')


# Monkey-Patch django to disable CSRF everywhere
# CSRF middlware class is used as middleware and internally by DjangoRestFramework
from django.middleware import csrf
from reportcreator_api.utils.middleware import CustomCsrfMiddleware
csrf.CsrfViewMiddleware = CustomCsrfMiddleware


# Path to the compiled Vue rendering bundle used for PDF generation.
PDF_RENDER_SCRIPT_PATH = config('PDF_RENDER_SCRIPT_PATH', cast=Path, default=BASE_DIR / '..' / 'rendering' / 'dist' / 'bundle.js')
CHROMIUM_EXECUTABLE = config('CHROMIUM_EXECUTABLE', default=None)


# Celery client settings
CELERY_BROKER_URL = config('CELERY_BROKER_URL', default='')
# The broker URL may alternatively be supplied via a file (e.g. a mounted secret).
CELERY_BROKER_URL_FILE = config('CELERY_BROKER_URL_FILE', default=None)
if not CELERY_BROKER_URL and CELERY_BROKER_URL_FILE:
    CELERY_BROKER_URL = Path(CELERY_BROKER_URL_FILE).read_text()
CELERY_RESULT_BACKEND = config('CELERY_RESULT_BACKEND', default='rpc://')


CELERY_RESULT_EXPIRES = timedelta(seconds=30)
CELERY_TASK_DEFAULT_EXCHANGE = 'tasks'
CELERY_TASK_QUEUES_NO_DECLARE = config('CELERY_TASK_QUEUES_NO_DECLARE', cast=bool, default=False)
from kombu import Queue
CELERY_TASK_QUEUES = [
    Queue('rendering', routing_key='tasks.rendering', no_declare=CELERY_TASK_QUEUES_NO_DECLARE),
]
# Route PDF rendering tasks to the dedicated "rendering" queue/worker.
CELERY_TASK_ROUTES = {
    'reportcreator.render_pdf': {
        'exchange': CELERY_TASK_DEFAULT_EXCHANGE,
        'queue': 'rendering',
        'routing_key': 'tasks.rendering',
    },
}


# Celery worker settings
CELERY_SECURE_WORKER = config('CELERY_SECURE_WORKER', cast=bool, default=False)
if CELERY_SECURE_WORKER:
    # Hardened rendering worker: one task per single-use child process.
    CELERY_WORKER_POOL = 'prefork'
    CELERY_WORKER_CONCURRENCY = 1
    CELERY_WORKER_MAX_TASKS_PER_CHILD = 1
    CELERY_WORKER_PREFETCH_MULTIPLIER = 1
    CELERY_BROKER_POOL_LIMIT = 0
    CELERY_TASK_ACKS_LATE = False
    CELERY_WORKER_ENABLE_REMOTE_CONTROL = True


CELERY_WORKER_HIJACK_ROOT_LOGGER=False
CELERY_WORKER_SEND_TASK_EVENTS = False
CELERY_TASK_TIME_LIMIT = 60 * 5
CELERY_TASK_SOFT_TIME_LIMIT = 60 * 5 + 10

# Execute tasks locally, if no broker is configured
CELERY_TASK_ALWAYS_EAGER = not CELERY_BROKER_URL



# Periodic tasks
PERIODIC_TASKS = [
    {
        'id': 'fetch_notifications',
        'task': 'reportcreator_api.notifications.tasks.fetch_notifications',
        'schedule': timedelta(days=1),
    },
    {
        'id': 'clear_sessions',
        'task': 'reportcreator_api.utils.tasks.clear_sessions',
        'schedule': timedelta(days=1),
    },
    {
        'id': 'cleanup_unreferenced_images_and_files',
        'task': 'reportcreator_api.pentests.tasks.cleanup_unreferenced_images_and_files',
        'schedule': timedelta(days=1),
    },
    {
        'id': 'reset_stale_archive_restores',
        'task': 'reportcreator_api.pentests.tasks.reset_stale_archive_restores',
        'schedule': timedelta(days=1),
    }
]


# MAX_LOCK_TIME should not be less than 1.30min, because some browsers (Chromium) triggers timers only once per minute if the browser tab is inactive
MAX_LOCK_TIME = timedelta(seconds=90)
|
||||
|
||||
SPELLCHECK_URL = config('SPELLCHECK_URL', default=None)
SPELLCHECK_DICTIONARY_PER_USER = config('SPELLCHECK_DICTIONARY_PER_USER', cast=bool, default=False)

# AES key for encrypting backups; backups are disabled when unset.
BACKUP_KEY = config('BACKUP_KEY', default=None)

COMPRESS_IMAGES = config('COMPRESS_IMAGES', cast=bool, default=True)


# Data-at-rest encryption keys (JSON list in env); DEFAULT_ENCRYPTION_KEY_ID
# selects the key used for writes, ENCRYPTION_PLAINTEXT_FALLBACK allows
# reading data stored before encryption was enabled.
from reportcreator_api.archive.crypto import EncryptionKey
ENCRYPTION_KEYS = EncryptionKey.from_json_list(config('ENCRYPTION_KEYS', default=''))
DEFAULT_ENCRYPTION_KEY_ID = config('DEFAULT_ENCRYPTION_KEY_ID', default=None)
ENCRYPTION_PLAINTEXT_FALLBACK = config('ENCRYPTION_PLAINTEXT_FALLBACK', cast=bool, default=True)

# Guest-user permission flags.
# Fix: cast=bool was missing here. Without it, any non-empty env value
# (including "false"/"0") is a truthy string, silently enabling the feature.
GUEST_USERS_CAN_IMPORT_PROJECTS = config('GUEST_USERS_CAN_IMPORT_PROJECTS', cast=bool, default=False)
GUEST_USERS_CAN_CREATE_PROJECTS = config('GUEST_USERS_CAN_CREATE_PROJECTS', cast=bool, default=True)
GUEST_USERS_CAN_DELETE_PROJECTS = config('GUEST_USERS_CAN_DELETE_PROJECTS', cast=bool, default=True)
GUEST_USERS_CAN_UPDATE_PROJECT_SETTINGS = config('GUEST_USERS_CAN_UPDATE_PROJECT_SETTINGS', cast=bool, default=True)

ENABLE_PRIVATE_DESIGNS = config('ENABLE_PRIVATE_DESIGNS', cast=bool, default=False)

# Number of key parts required to restore an archived project (4-eye principle).
ARCHIVING_THRESHOLD = config('ARCHIVING_THRESHOLD', cast=int, default=2)
# Validate eagerly at import time. A plain raise (instead of assert) is used
# because asserts are stripped when Python runs with -O.
if ARCHIVING_THRESHOLD <= 0:
    raise ValueError('ARCHIVING_THRESHOLD must be greater than 0')
|
||||
|
||||
# Health checks
HEALTH_CHECKS = {
    'cache': 'reportcreator_api.api_utils.healthchecks.check_cache',
    'database': 'reportcreator_api.api_utils.healthchecks.check_database',
    'migrations': 'reportcreator_api.api_utils.healthchecks.check_migrations',
}

# Notifications
VERSION = config('VERSION', default='dev')
# Semicolon-separated instance tags used for notification targeting.
INSTANCE_TAGS = config('INSTANCE_TAGS', default='on-premise').split(';')
NOTIFICATION_IMPORT_URL = config('NOTIFICATION_IMPORT_URL', default='https://cloud.sysreptor.com/api/v1/notifications/')


# License
LICENSE = config('LICENSE', default=None)
# Public keys used to verify license signatures (ed25519).
LICENSE_VALIDATION_KEYS = [
    {'id': 'amber', 'algorithm': 'ed25519', 'key': 'MCowBQYDK2VwAyEAkqCS3lZbrzh+2mKTYymqPHtKBrh8glFxnj9OcoQR9xQ='},
    {'id': 'silver', 'algorithm': 'ed25519', 'key': 'MCowBQYDK2VwAyEAwu/cl0CZSSBFOzFSz/hhUQQjHIKiT4RS3ekPevSKn7w='},
]
# User limit of the unlicensed community edition.
LICENSE_COMMUNITY_MAX_USERS = 3


# Elastic APM
ELASTIC_APM_ENABLED = config('ELASTIC_APM_ENABLED', cast=bool, default=False)
ELASTIC_APM = {
    'ENABLED': ELASTIC_APM_ENABLED,
    'SERVICE_NAME': config('ELASTIC_APM_SERVICE_NAME', default=''),
    'SERVICE_TOKEN': config('ELASTIC_APM_SERVICE_TOKEN', default=''),
    'SERVER_URL': config('ELASTIC_APM_SERVER_URL', default=''),
    'SPAN_COMPRESSION_ENABLED': False,
    'DJANGO_AUTOINSERT_MIDDLEWARE': False,
    'DJANGO_TRANSACTION_NAME_FROM_ROUTE': True,
}
if ELASTIC_APM_ENABLED:
    # Insert tracing right after WhiteNoise so all application middleware is traced.
    INSTALLED_APPS.append('elasticapm.contrib.django')
    MIDDLEWARE.insert(1, 'elasticapm.contrib.django.middleware.TracingMiddleware')

# Elastic APM Real User Monitoring config passed to the frontend.
ELASTIC_APM_RUM_ENABLED = config('ELASTIC_APM_RUM_ENABLED', cast=bool, default=False)
ELASTIC_APM_RUM_CONFIG = {
    'active': ELASTIC_APM_RUM_ENABLED,
    'serviceName': config('ELASTIC_APM_RUM_SERVICE_NAME', default=''),
    'serverUrl': config('ELASTIC_APM_RUM_SERVER_URL', default=''),
    'serviceVersion': 'dev',
}
if ELASTIC_APM_RUM_ENABLED:
    # The browser agent must be allowed to send data to the APM server.
    CSP_CONNECT_SRC.append(ELASTIC_APM_RUM_CONFIG['serverUrl'])


if DEBUG:
    INSTALLED_APPS += [
        'debug_toolbar',
    ]
    MIDDLEWARE += [
        'debug_toolbar.middleware.DebugToolbarMiddleware',
    ]
    # Show the debug toolbar for every client IP (membership test always True).
    INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
|
||||
|
||||
|
||||
|
||||
# Send log records to stdout, plus to Elastic APM when it is enabled.
logging_handlers = ['console'] + (['elasticapm'] if ELASTIC_APM_ENABLED else [])
LOGGING = {
    'version': 1,
    # Fix: the key was misspelled 'disabled_existing_loggers', so Django's
    # documented 'disable_existing_loggers' option was silently ignored.
    'disable_existing_loggers': False,
    'formatters': {
        'default': {
            'class': 'logging.Formatter',
            'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'formatter': 'default',
            'class': 'logging.StreamHandler',
        },
        'elasticapm': {
            'level': 'WARNING',
            'class': 'elasticapm.contrib.django.handlers.LoggingHandler',
        },
    },
    'root': {
        'level': 'INFO',
        'handlers': logging_handlers,
    },
    # Silence chatty third-party loggers; each gets the shared handlers and
    # does not propagate to root (avoids duplicate records).
    'loggers': {
        'celery': {
            'level': 'WARNING',
            'handlers': logging_handlers,
            'propagate': False,
        },
        'celery.worker.strategy': {
            'level': 'INFO',
            'handlers': logging_handlers,
            'propagate': False,
        },
        'weasyprint': {
            'level': 'ERROR',
            'handlers': logging_handlers,
            'propagate': False,
        },
        'playwright': {
            'level': 'WARNING',
            # Fix: was misspelled 'hanlders', so this logger's handler
            # configuration was silently dropped by dictConfig.
            'handlers': logging_handlers,
            'propagate': False,
        },
        'pikepdf': {
            'level': 'WARNING',
            'handlers': logging_handlers,
            'propagate': False,
        },
        'fontTools': {
            'level': 'WARNING',
            'handlers': logging_handlers,
            'propagate': False,
        },
    }
}
|
|
@ -0,0 +1,30 @@
|
|||
# Test settings: start from the production settings and override everything
# that would hit external services or slow the test suite down.
from reportcreator_api.conf.settings import *


# Keep all uploaded/archived files in memory during tests.
STORAGES = STORAGES | {
    'uploaded_images': {'BACKEND': 'django.core.files.storage.InMemoryStorage'},
    'uploaded_assets': {'BACKEND': 'django.core.files.storage.InMemoryStorage'},
    'uploaded_files': {'BACKEND': 'django.core.files.storage.InMemoryStorage'},
    'archived_files': {'BACKEND': 'django.core.files.storage.InMemoryStorage'},
}


# No rate limits in tests; send test requests as JSON by default.
REST_FRAMEWORK['DEFAULT_THROTTLE_CLASSES'] = []
REST_FRAMEWORK['TEST_REQUEST_DEFAULT_FORMAT'] = 'json'


# Disable external integrations (SSO, APM, broker, notification import).
AUTHLIB_OAUTH_CLIENTS = {}
ELASTIC_APM_ENABLED = False
ELASTIC_APM_RUM_ENABLED = False
CELERY_TASK_ALWAYS_EAGER = True
NOTIFICATION_IMPORT_URL = None

ENABLE_PRIVATE_DESIGNS = True
# Single key part suffices to restore archives in tests.
ARCHIVING_THRESHOLD = 1

BACKUP_KEY = 'dummy-backup-key-used-in-unit-test'


# Disable license check
# NOTE(review): monkey-patches the license check to always report a large
# professional license so tests are not limited by community-edition caps.
from reportcreator_api.utils import license
license.check_license = lambda: {'type': license.LicenseType.PROFESSIONAL, 'users': 1000}
|
|
@ -0,0 +1,88 @@
|
|||
from django.conf import settings
|
||||
from django.conf.urls.static import static
|
||||
from django.contrib import admin
|
||||
from django.urls import path, include, re_path
|
||||
from django.http import HttpResponse
|
||||
from django.views.generic.base import TemplateView, RedirectView
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from rest_framework_nested.routers import NestedSimpleRouter
|
||||
|
||||
from reportcreator_api.api_utils.views import SpellcheckWordView, UtilsViewSet, SpellcheckView, HealthcheckView
|
||||
from reportcreator_api.pentests.views import ArchivedProjectKeyPartViewSet, ArchivedProjectViewSet, FindingTemplateViewSet, PentestFindingViewSet, PentestProjectViewSet, ProjectNotebookPageViewSet, \
|
||||
PentestProjectPreviewView, PentestProjectGenerateView, \
|
||||
ProjectTypeViewSet, ProjectTypePreviewView, \
|
||||
ReportSectionViewSet, UploadedAssetViewSet, UploadedImageViewSet, UploadedProjectFileViewSet, UploadedUserNotebookImageViewSet, UserNotebookPageViewSet, UserPublicKeyViewSet
|
||||
from reportcreator_api.users.views import PentestUserViewSet, MFAMethodViewSet, AuthViewSet, AuthIdentityViewSet
|
||||
from reportcreator_api.notifications.views import NotificationViewSet
|
||||
|
||||
|
||||
# Top-level API router. Registration order matters for the two overlapping
# 'pentestusers/...' prefixes: the more specific notes/images route comes first.
router = DefaultRouter()
router.register('pentestusers', PentestUserViewSet, basename='pentestuser')
router.register('pentestusers/self/notes/images', UploadedUserNotebookImageViewSet, basename='uploadedusernotebookimage')
router.register('pentestusers/self/notes', UserNotebookPageViewSet, basename='usernotebookpage')
router.register('projecttypes', ProjectTypeViewSet, basename='projecttype')
router.register('pentestprojects', PentestProjectViewSet, basename='pentestproject')
router.register('archivedprojects', ArchivedProjectViewSet, basename='archivedproject')
router.register('findingtemplates', FindingTemplateViewSet, basename='findingtemplate')
router.register('utils', UtilsViewSet, basename='utils')
router.register('auth', AuthViewSet, basename='auth')

# Per-user sub-resources: /pentestusers/<id>/...
user_router = NestedSimpleRouter(router, 'pentestusers', lookup='pentestuser')
user_router.register('mfa', MFAMethodViewSet, basename='mfamethod')
user_router.register('identities', AuthIdentityViewSet, basename='authidentity')
user_router.register('notifications', NotificationViewSet, basename='notification')
user_router.register('publickeys', UserPublicKeyViewSet, basename='userpublickey')

# Per-project sub-resources: /pentestprojects/<id>/...
project_router = NestedSimpleRouter(router, 'pentestprojects', lookup='project')
project_router.register('sections', ReportSectionViewSet, basename='section')
project_router.register('findings', PentestFindingViewSet, basename='finding')
project_router.register('notes', ProjectNotebookPageViewSet, basename='projectnotebookpage')
project_router.register('images', UploadedImageViewSet, basename='uploadedimage')
project_router.register('files', UploadedProjectFileViewSet, basename='uploadedprojectfile')

# Per-design sub-resources: /projecttypes/<id>/...
projecttype_router = NestedSimpleRouter(router, 'projecttypes', lookup='projecttype')
projecttype_router.register('assets', UploadedAssetViewSet, basename='uploadedasset')

# Per-archive sub-resources: /archivedprojects/<id>/...
archivedproject_router = NestedSimpleRouter(router, 'archivedprojects', lookup='archivedproject')
archivedproject_router.register('keyparts', ArchivedProjectKeyPartViewSet, basename='archivedprojectkeypart')

# Make trailing slash in URL optional to support loading images and assets by filename
router.trailing_slash = '/?'
project_router.trailing_slash = '/?'
projecttype_router.trailing_slash = '/?'
archivedproject_router.trailing_slash = '/?'
|
||||
|
||||
|
||||
urlpatterns = [
    # Admin login is replaced by the re-authentication flow of the SPA.
    path('admin/login/', RedirectView.as_view(url='/users/self/admin/enable/')),
    path('admin/', admin.site.urls),
    re_path(r'^api/?$', RedirectView.as_view(url='/api/v1/')),
    path('api/v1/', include([
        path('', include(router.urls)),
        path('', include(user_router.urls)),
        path('', include(project_router.urls)),
        path('', include(projecttype_router.urls)),
        path('', include(archivedproject_router.urls)),

        # Async views
        path('utils/spellcheck/', SpellcheckView.as_view(), name='utils-spellcheck'),
        path('utils/spellcheck/words/', SpellcheckWordView.as_view(), name='utils-spellcheck-words'),
        path('utils/healthcheck/', HealthcheckView.as_view(), name='utils-healthcheck'),
        path('pentestprojects/<uuid:pk>/preview/', PentestProjectPreviewView.as_view(), name='pentestproject-preview'),
        path('pentestprojects/<uuid:pk>/generate/', PentestProjectGenerateView.as_view(), name='pentestproject-generate'),
        path('projecttypes/<uuid:pk>/preview/', ProjectTypePreviewView.as_view(), name='projecttype-preview'),
    ])),

    # Static files
    path('robots.txt', lambda *args, **kwargs: HttpResponse("User-Agent: *\nDisallow: /\n", content_type="text/plain")),

    # Fallback URL for SPA: everything that is not an API or admin route is
    # answered with the SPA's index.html and routed client-side.
    re_path(r'^(?!(api|admin)).*/?$', TemplateView.as_view(template_name='index.html')),
]


if settings.DEBUG:
    # Debug toolbar and locally-served static files in development only.
    urlpatterns = [
        path('__debug__/', include('debug_toolbar.urls')),
    ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + urlpatterns
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
"""
WSGI config for reportcreator_api project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/
"""

import os

# NOTE(review): Path and WhiteNoise are imported but not used below —
# presumably leftovers from a WhiteNoise(application, ...) wrapper that was
# replaced by the WhiteNoise middleware; confirm before removing.
from pathlib import Path
from whitenoise import WhiteNoise
from django.core.wsgi import get_wsgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings')

application = get_wsgi_application()
|
|
@ -0,0 +1,33 @@
|
|||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
from reportcreator_api.pentests.models import ArchivedProject, UploadedAsset, UploadedImage, UploadedProjectFile, UploadedUserNotebookImage
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # Fixed typo in user-facing help text: "fielsystem" -> "filesystem".
    help = 'Clean up file entries from the DB where the files do not exist on the filesystem.'

    def file_exists(self, f):
        """Return True if the stored file backing ``f`` can be opened."""
        try:
            with f.open():
                return True
        except Exception:
            # Any open error (missing file, broken storage) counts as "missing".
            return False

    @transaction.atomic
    def handle(self, *args, **options):
        """Delete DB rows of all file-backed models whose file is gone from storage.

        The original repeated the identical query/delete stanza for each of the
        five models; a single loop keeps the behavior and removes the duplication.
        """
        for model in (UploadedAsset, UploadedImage, UploadedUserNotebookImage,
                      UploadedProjectFile, ArchivedProject):
            model.objects \
                .filter(pk__in=[o.pk for o in model.objects.iterator() if not self.file_exists(o.file)]) \
                .delete()
||||
|
|
@ -0,0 +1,66 @@
|
|||
import itertools
|
||||
import warnings
|
||||
import copy
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.test import override_settings
|
||||
|
||||
from reportcreator_api.pentests.models import PentestFinding, PentestProject, ProjectType, UploadedAsset, UploadedImage, \
|
||||
UploadedProjectFile, UploadedUserNotebookImage, NotebookPage, UserPublicKey, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart
|
||||
from reportcreator_api.users.models import MFAMethod, PentestUser, Session
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = 'Encrypt all data using the current encryption key. If data was encrypted with a different key, it is re-encrypted with the current key.'

    def add_arguments(self, parser) -> None:
        parser.add_argument('--decrypt', action='store_true', help='Decrypt all data')

    def encrypt_data(self):
        """Rewrite all encrypted DB fields and files with the active key settings.

        bulk_update on unchanged instances forces each encrypted field through
        the field's save path, which (re-)encrypts it with the currently
        configured DEFAULT_ENCRYPTION_KEY_ID (or decrypts when that is None and
        plaintext fallback is allowed — see handle()).
        """
        # Encrypt DB fields
        PentestProject.objects.bulk_update(PentestProject.objects.all().iterator(), ['custom_fields'])
        PentestFinding.objects.bulk_update(PentestFinding.objects.all().iterator(), ['custom_fields', 'template_id'])
        ProjectType.objects.bulk_update(ProjectType.objects.all().iterator(), ['report_template', 'report_styles', 'report_preview_data'])
        NotebookPage.objects.bulk_update(NotebookPage.objects.all(), ['title', 'text'])
        PentestUser.objects.bulk_update(PentestUser.objects.all(), ['password'])
        Session.objects.bulk_update(Session.objects.all(), ['session_key', 'session_data'])
        MFAMethod.objects.bulk_update(MFAMethod.objects.all(), ['data'])
        UserPublicKey.objects.bulk_update(UserPublicKey.objects.all(), ['public_key'])
        ArchivedProjectKeyPart.objects.bulk_update(ArchivedProjectKeyPart.objects.all(), ['key_part'])
        ArchivedProjectPublicKeyEncryptedKeyPart.objects.bulk_update(ArchivedProjectPublicKeyEncryptedKeyPart.objects.all(), ['encrypted_data'])

        # Encrypt files
        # Old file objects are kept and deleted only after all copies succeeded,
        # so a failure mid-way does not lose data.
        old_files = []
        for f in itertools.chain(
            UploadedImage.objects.all(),
            UploadedAsset.objects.all(),
            UploadedUserNotebookImage.objects.all(),
            UploadedProjectFile.objects.all()
        ):
            # Copy file content. Encryption is performed during content copy to new file by the storage
            old_file = copy.copy(f.file)
            f.file.save(name=f.name, content=old_file, save=False)
            f.save()
            old_files.append(old_file)
        for f in old_files:
            f.storage.delete(f.name)

    def handle(self, decrypt, *args, **options):
        """Entry point: re-encrypt everything, or decrypt with --decrypt."""
        if not settings.ENCRYPTION_KEYS:
            raise CommandError('No ENCRYPTION_KEYS configured')

        if decrypt:
            if settings.DEFAULT_ENCRYPTION_KEY_ID:
                warnings.warn('A DEFAULT_ENCRYPTION_KEY_ID is configured. New and updated data will be encrypted while storing it. Set DEFAULT_ENCRYPTION_KEY_ID=None to permanently disable encryption.')

            # No default key + plaintext fallback => data is written back decrypted.
            with override_settings(DEFAULT_ENCRYPTION_KEY_ID=None, ENCRYPTION_PLAINTEXT_FALLBACK=True):
                self.encrypt_data()
        else:
            if not settings.DEFAULT_ENCRYPTION_KEY_ID:
                raise CommandError('No DEFAULT_ENCRYPTION_KEY_ID configured')
            if not settings.ENCRYPTION_KEYS.get(settings.DEFAULT_ENCRYPTION_KEY_ID):
                raise CommandError('Invalid DEFAULT_ENCRYPTION_KEY_ID')
            # Plaintext fallback enabled so not-yet-encrypted data can still be read.
            with override_settings(ENCRYPTION_PLAINTEXT_FALLBACK=True):
                self.encrypt_data()
|
||||
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
|
||||
|
||||
import argparse
|
||||
import shutil
|
||||
import tempfile
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from reportcreator_api.pentests.models.project import PentestProject
|
||||
|
||||
from reportcreator_api.users.models import PentestUser
|
||||
from reportcreator_api.archive.import_export import import_project_types, import_templates, import_projects
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = 'Import archives containing demo data'

    def add_arguments(self, parser):
        # Default '-' reads the archive from stdin
        parser.add_argument('file', nargs='?', type=argparse.FileType('rb'), default='-')
        parser.add_argument('--type', choices=['design', 'template', 'project'])
        parser.add_argument('--add-member', action='append', help='Add user as member to imported projects')

    def get_user(self, u):
        """Resolve a user given either as UUID or as username.

        Raises CommandError when no matching user exists.
        """
        try:
            return PentestUser.objects.get(id=uuid.UUID(u))
        except Exception:
            # Not a UUID or no user with that id: fall back to username lookup
            pass
        try:
            return PentestUser.objects.get(username=u)
        except PentestUser.DoesNotExist:
            raise CommandError(f'User "{u}" not found')

    def handle(self, file, type, add_member, *args, **options):
        """Import the archive and optionally add members to imported projects."""
        # Fix: with action='append' argparse passes None when --add-member is
        # never given; mapping over None raised a TypeError before.
        if type == 'project':
            add_member = list(map(self.get_user, add_member or []))

        import_func = {
            'design': import_project_types,
            'template': import_templates,
            'project': import_projects,
        }.get(type)
        if import_func is None:
            # Previously a missing --type surfaced as a raw KeyError traceback
            raise CommandError('No --type specified. Choose one of: design, template, project')

        # Spool to a seekable temp file: the input may be a non-seekable stream (stdin)
        with tempfile.SpooledTemporaryFile(max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode='w+b') as f:
            shutil.copyfileobj(file, f)
            f.seek(0)
            imported = import_func(f)
            if type == 'project':
                for u in add_member:
                    PentestProject.objects.add_member(u, imported)
|
|
@ -0,0 +1,15 @@
|
|||
from django.contrib import admin
|
||||
|
||||
from reportcreator_api.utils.admin import BaseAdmin
|
||||
from reportcreator_api.notifications.models import NotificationSpec, UserNotification
|
||||
|
||||
|
||||
@admin.register(NotificationSpec)
class NotificationSpecAdmin(BaseAdmin):
    # Plain admin for notification specifications; no customizations needed.
    pass
||||
|
||||
@admin.register(UserNotification)
class UserNotificationAdmin(BaseAdmin):
    # Plain admin for per-user notification assignments; no customizations needed.
    pass
|
|
@ -0,0 +1,10 @@
|
|||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class NotificationsConfig(AppConfig):
    """App config for the notifications app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'reportcreator_api.notifications'

    def ready(self) -> None:
        # Imported for their side effects: signal receivers and task registration
        from . import signals  # noqa
        from . import tasks  # noqa
|
@ -0,0 +1,55 @@
|
|||
# Generated by Django 4.1.3 on 2023-01-24 13:11
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import reportcreator_api.utils.models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Initial migration: creates NotificationSpec and UserNotification."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='NotificationSpec',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('active_until', models.DateField(blank=True, db_index=True, null=True)),
                ('instance_conditions', models.JSONField(blank=True, default=dict)),
                ('user_conditions', models.JSONField(blank=True, default=dict)),
                ('visible_for_days', models.IntegerField(blank=True, null=True)),
                ('title', models.CharField(max_length=255)),
                ('text', models.TextField()),
                ('link_url', models.TextField(blank=True, null=True)),
            ],
            options={
                'ordering': ['-created'],
                'abstract': False,
            },
            bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model),
        ),
        migrations.CreateModel(
            name='UserNotification',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('visible_until', models.DateTimeField(blank=True, null=True)),
                ('read', models.BooleanField(db_index=True, default=False)),
                ('notification', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='notifications.notificationspec')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'unique_together': {('user', 'notification')},
            },
            bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model),
        ),
    ]
|
|
@ -0,0 +1,38 @@
|
|||
from django.db import models
|
||||
|
||||
from reportcreator_api.notifications import querysets
|
||||
from reportcreator_api.utils.models import BaseModel
|
||||
from reportcreator_api.users.models import PentestUser
|
||||
|
||||
|
||||
class NotificationSpec(BaseModel):
    """
    Specification for a notification that gets assigned to users.
    """
    # Spec is considered inactive after this date; null means no expiry
    active_until = models.DateField(null=True, blank=True, db_index=True)
    # Conditions on the installation (version, instance tags); evaluated in NotificationSpecManager
    instance_conditions = models.JSONField(default=dict, blank=True)
    # Conditions on user roles (is_superuser, is_designer, ...); evaluated in NotificationSpecManager
    user_conditions = models.JSONField(default=dict, blank=True)
    # How long an assigned UserNotification stays visible; null means forever
    visible_for_days = models.IntegerField(null=True, blank=True)

    title = models.CharField(max_length=255)
    text = models.TextField()
    link_url = models.TextField(null=True, blank=True)

    objects = querysets.NotificationSpecManager()
||||
|
||||
class UserNotification(BaseModel):
    """
    Notification assigned to a specific user. Can be marked as read.
    """
    user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, related_name='notifications')
    notification = models.ForeignKey(to=NotificationSpec, on_delete=models.CASCADE)

    # Hidden from the user after this time; derived from NotificationSpec.visible_for_days
    visible_until = models.DateTimeField(null=True, blank=True, )
    read = models.BooleanField(default=False, db_index=True)

    objects = models.Manager.from_queryset(querysets.UserNotificationQuerySet)()

    class Meta:
        # Each spec is assigned at most once per user
        unique_together = [('user', 'notification')]
||||
|
|
@ -0,0 +1,128 @@
|
|||
from packaging import version
|
||||
from datetime import timedelta
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from django.db.models import signals
|
||||
|
||||
from reportcreator_api.utils import license
|
||||
|
||||
|
||||
class UserNotificationQuerySet(models.QuerySet):
    def only_permitted(self, user):
        """Restrict to notifications assigned to the given user."""
        return self.filter(user=user)

    def only_visible(self):
        """Restrict to notifications that have not expired.

        Both the spec-level expiry (active_until) and the per-user expiry
        (visible_until) must either be unset or lie in the future.
        """
        spec_not_expired = (
            models.Q(notification__active_until__isnull=True) |
            models.Q(notification__active_until__gt=timezone.now())
        )
        user_not_expired = (
            models.Q(visible_until__isnull=True) |
            models.Q(visible_until__gt=timezone.now())
        )
        return self.filter(spec_not_expired).filter(user_not_expired)
||||
|
||||
class NotificationSpecQuerySet(models.QuerySet):
    def only_active(self):
        """Restrict to specs with no expiry date or an expiry date not yet passed."""
        never_expires = models.Q(active_until__isnull=True)
        not_yet_expired = models.Q(active_until__gte=timezone.now())
        return self.filter(never_expires | not_yet_expired)
|
||||
class NotificationSpecManager(models.Manager.from_queryset(NotificationSpecQuerySet)):
    """Matches NotificationSpecs to users and fans them out as UserNotifications."""

    def parse_version(self, version_str):
        """Parse a version string; returns None for invalid or missing input."""
        try:
            return version.Version(version_str)
        except (version.InvalidVersion, TypeError):
            return None

    def check_version(self, version_condition):
        """Return whether the installed version satisfies a condition like ">=1.2".

        A bare version (no operator prefix) means exact match. Callers only pass
        truthy conditions (see check_instance_conditions).
        """
        current_version = self.parse_version(settings.VERSION)
        if not current_version:
            # Non-semver build (e.g. "dev"): only literal string matches count
            if settings.VERSION and version_condition and (version_condition == settings.VERSION or version_condition == f'=={settings.VERSION}'):
                return True
            return False

        # Two-character operators must be tested before '>' / '<'
        if version_condition.startswith('=='):
            return current_version == self.parse_version(version_condition[2:])
        elif version_condition.startswith('>='):
            required_version = self.parse_version(version_condition[2:])
            return required_version and current_version >= required_version
        elif version_condition.startswith('<='):
            required_version = self.parse_version(version_condition[2:])
            return required_version and current_version <= required_version
        elif version_condition.startswith('>'):
            required_version = self.parse_version(version_condition[1:])
            return required_version and current_version > required_version
        elif version_condition.startswith('<'):
            required_version = self.parse_version(version_condition[1:])
            return required_version and current_version < required_version
        else:
            return current_version == self.parse_version(version_condition)

    def check_instance_conditions(self, notification):
        """Return whether this installation matches the spec's instance conditions."""
        current_instance_tags = list(settings.INSTANCE_TAGS)
        if license.is_professional():
            current_instance_tags.append('license:professional')
        elif not license.is_professional() and not license.check_license()['error']:
            current_instance_tags.append('license:community')
        # NOTE(review): the serializer validates this key as 'any_tags', but here
        # we read 'any_tag' — confirm which key the notification feed sends.
        if (instance_tags := set(notification.instance_conditions.get('any_tag', []))) and not instance_tags.intersection(current_instance_tags):
            return False
        if (version_condition := notification.instance_conditions.get('version')) and not self.check_version(version_condition):
            return False
        return True

    def users_for_notification(self, notification):
        """Return the queryset of users a spec should be assigned to."""
        from reportcreator_api.users.models import PentestUser

        if notification.active_until and notification.active_until < timezone.now().date():
            return PentestUser.objects.none()

        # Instance conditions
        if not self.check_instance_conditions(notification):
            return PentestUser.objects.none()

        # User conditions: each boolean role condition narrows the user set
        users = PentestUser.objects.all()
        for role in ['is_superuser', 'is_designer', 'is_template_editor', 'is_user_manager']:
            if role in notification.user_conditions and isinstance(notification.user_conditions[role], bool):
                users = users.filter(**{role: notification.user_conditions[role]})

        return users

    def notifications_for_user(self, user):
        """Return the active specs a (new) user should receive."""
        from reportcreator_api.notifications.models import NotificationSpec

        # Each condition either is absent from user_conditions or matches the user's role
        notifications = NotificationSpec.objects \
            .only_active() \
            .filter(models.Q(user_conditions__is_superuser__isnull=True) | models.Q(user_conditions__is_superuser=user.is_superuser)) \
            .filter(models.Q(user_conditions__is_designer__isnull=True) | models.Q(user_conditions__is_designer=user.is_designer)) \
            .filter(models.Q(user_conditions__is_template_editor__isnull=True) | models.Q(user_conditions__is_template_editor=user.is_template_editor)) \
            .filter(models.Q(user_conditions__is_user_manager__isnull=True) | models.Q(user_conditions__is_user_manager=user.is_user_manager))
        notifications = list(filter(self.check_instance_conditions, notifications))
        return notifications

    def _build_user_notification(self, user, notification):
        # Build an unsaved UserNotification with the spec's visibility window applied
        from reportcreator_api.notifications.models import UserNotification
        visible_until = None
        if notification.visible_for_days:
            visible_until = timezone.now() + timedelta(days=notification.visible_for_days)
        return UserNotification(user=user, notification=notification, visible_until=visible_until)

    def assign_to_users(self, notification):
        """Assign a spec to all matching users that do not have it yet."""
        from reportcreator_api.notifications.models import UserNotification
        users = self.users_for_notification(notification) \
            .exclude(notifications__notification=notification)
        UserNotification.objects.bulk_create([self._build_user_notification(u, notification) for u in users])

    def assign_to_notifications(self, user):
        """Assign all matching specs to a (new) user."""
        from reportcreator_api.notifications.models import UserNotification
        notifications = self.notifications_for_user(user)
        UserNotification.objects.bulk_create([self._build_user_notification(user, n) for n in notifications])

    def bulk_create(self, *args, **kwargs):
        """bulk_create that also emits post_save signals.

        Django's bulk_create skips signals; receivers (e.g. assigning specs to
        users) rely on post_save, so it is sent manually for every created object.
        """
        objs = super().bulk_create(*args, **kwargs)
        for o in objs:
            signals.post_save.send(sender=o.__class__, instance=o, created=True, raw=False, update_fields=None)
        return objs
|
@ -0,0 +1,57 @@
|
|||
from datetime import timedelta
|
||||
from django.utils import timezone
|
||||
from rest_framework import serializers
|
||||
|
||||
from reportcreator_api.notifications.models import UserNotification, NotificationSpec
|
||||
|
||||
|
||||
class NotificationSpecContentSerializer(serializers.ModelSerializer):
    """Read-only content of a notification spec as embedded in user notifications."""
    class Meta:
        model = NotificationSpec
        fields = ['title', 'text', 'link_url']
|
||||
|
||||
class UserNotificationSerializer(serializers.ModelSerializer):
    """API representation of a user's notification; only 'read' is writable via the viewset."""
    # Embed the spec's title/text/link under 'content'
    content = NotificationSpecContentSerializer(source='notification', read_only=True)

    class Meta:
        model = UserNotification
        fields = ['id', 'created', 'updated', 'read', 'content']
|
||||
|
||||
class InstanceConditionsSerializer(serializers.Serializer):
    """Validates the instance_conditions JSON of an imported notification spec."""
    # Optional version condition, e.g. "==1.2", ">=0.9", or a bare version (exact match)
    version = serializers.RegexField(r'^(==|>=|<=|>|<)?[0-9a-zA-Z.]+$', required=False)
    # NOTE(review): validated here as 'any_tags', but NotificationSpecManager
    # .check_instance_conditions reads instance_conditions.get('any_tag') —
    # confirm which key the notification feed actually sends.
    any_tags = serializers.ListField(child=serializers.CharField(), required=False)
|
||||
|
||||
class UserConditionsSerializer(serializers.Serializer):
    """Validates the user_conditions JSON: optional boolean role requirements."""
    is_superuser = serializers.BooleanField(required=False)
    is_user_manager = serializers.BooleanField(required=False)
    is_designer = serializers.BooleanField(required=False)
    is_template_editor = serializers.BooleanField(required=False)
|
||||
|
||||
class NotificationSpecListSerializer(serializers.ListSerializer):
    """Synchronizes the full set of notification specs received from the remote feed."""

    def create(self, validated_data):
        notifications = [NotificationSpec(**n) for n in validated_data]
        # Set deleted notifications as inactive
        NotificationSpec.objects \
            .only_active() \
            .exclude(id__in=[n.id for n in notifications]) \
            .update(active_until=(timezone.now() - timedelta(days=1)).date())
        # Create new notifications; skip specs imported earlier and specs whose
        # instance conditions do not match this installation
        existing_notification_ids = set(NotificationSpec.objects.filter(id__in=[n.id for n in notifications]).values_list('id', flat=True))
        new_notifications = list(filter(lambda n: n.id not in existing_notification_ids and NotificationSpec.objects.check_instance_conditions(n), notifications))
        return NotificationSpec.objects.bulk_create(new_notifications)
|
||||
|
||||
class NotificationSpecSerializer(serializers.ModelSerializer):
    """Serializer for notification specs imported from the remote feed."""
    instance_conditions = InstanceConditionsSerializer(required=False)
    user_conditions = UserConditionsSerializer(required=False)

    class Meta:
        model = NotificationSpec
        fields = ['id', 'active_until', 'visible_for_days', 'instance_conditions', 'user_conditions', 'title', 'text', 'link_url']
        # The id comes from the feed; it must be writable so already-imported
        # specs can be recognized in NotificationSpecListSerializer.create
        extra_kwargs = {'id': {'read_only': False}}
        list_serializer_class = NotificationSpecListSerializer
||||
|
|
@ -0,0 +1,25 @@
|
|||
from django.db.models import signals
|
||||
from django.dispatch import receiver
|
||||
|
||||
from reportcreator_api.notifications.models import NotificationSpec
|
||||
from reportcreator_api.users.models import PentestUser
|
||||
from reportcreator_api.utils.models import disable_for_loaddata
|
||||
|
||||
|
||||
@receiver(signals.post_save, sender=NotificationSpec)
@disable_for_loaddata
def notification_created(sender, instance, created, *args, **kwargs):
    """Fan a newly created notification spec out to all matching users."""
    if created:
        NotificationSpec.objects.assign_to_users(instance)
|
||||
|
||||
@receiver(signals.post_save, sender=PentestUser)
@disable_for_loaddata
def user_created(sender, instance, created, *args, **kwargs):
    """Assign all currently matching notification specs to a newly created user."""
    if created:
        NotificationSpec.objects.assign_to_notifications(instance)
||||
|
|
@ -0,0 +1,23 @@
|
|||
import httpx
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.conf import settings
|
||||
|
||||
from reportcreator_api.notifications.serializers import NotificationSpecSerializer
|
||||
|
||||
|
||||
async def fetch_notifications_request():
    """Fetch the notification feed from the configured URL and return the parsed JSON.

    Raises httpx.HTTPStatusError on non-2xx responses.
    """
    async with httpx.AsyncClient(timeout=10) as client:
        res = await client.get(settings.NOTIFICATION_IMPORT_URL)
        res.raise_for_status()
        return res.json()
|
||||
|
||||
async def fetch_notifications(task_info):
    """Periodic task: import notification specs from the remote feed.

    No-op when NOTIFICATION_IMPORT_URL is not configured. Validation errors in
    the feed raise and thereby fail the task.
    """
    if not settings.NOTIFICATION_IMPORT_URL:
        return

    data = await fetch_notifications_request()
    serializer = NotificationSpecSerializer(data=data, many=True)
    serializer.is_valid(raise_exception=True)
    # serializer.save() performs synchronous DB queries; wrap for async context
    await sync_to_async(serializer.save)()
||||
|
|
@ -0,0 +1,21 @@
|
|||
from rest_framework import viewsets, mixins, permissions
|
||||
from rest_framework.settings import api_settings
|
||||
|
||||
from reportcreator_api.notifications.models import UserNotification
|
||||
from reportcreator_api.notifications.serializers import UserNotificationSerializer
|
||||
|
||||
|
||||
class NotificationPermissions(permissions.BasePermission):
|
||||
def has_permission(self, request, view):
|
||||
return view.kwargs.get('pentestuser_pk') == 'self'
|
||||
|
||||
|
||||
class NotificationViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet):
    """List, retrieve and update (e.g. mark as read) the current user's notifications."""
    serializer_class = UserNotificationSerializer
    permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [NotificationPermissions]

    def get_queryset(self):
        # Only the requesting user's unexpired notifications; select_related
        # avoids an extra query for the embedded spec content
        return UserNotification.objects \
            .only_permitted(self.request.user) \
            .only_visible() \
            .select_related('notification')
|
|
@ -0,0 +1,131 @@
|
|||
from django.contrib import admin
|
||||
|
||||
from reportcreator_api.pentests.models import FindingTemplate, NotebookPage, PentestFinding, PentestProject, ProjectType, ReportSection, \
|
||||
UploadedImage, UploadedAsset, UploadedProjectFile, UploadedUserNotebookImage, ProjectMemberInfo, ProjectMemberRole, \
|
||||
UserPublicKey, ArchivedProject, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart
|
||||
from reportcreator_api.utils.admin import BaseAdmin, admin_change_url, admin_changelist_url
|
||||
|
||||
|
||||
@admin.register(ProjectType)
class ProjectTypeAdmin(BaseAdmin):
    """Admin for report designs with cross-links to dependent objects."""

    def link_projects(self, obj):
        # Changelist of all projects referencing this design
        return admin_changelist_url('Projects using this ProjectType', 'pentests', 'pentestproject', {'project_type_id': obj.id})

    def link_uploaded_assets(self, obj):
        # Fixed: a filtered list of assets needs a changelist URL; admin_change_url
        # expects a single object pk, not a query-parameter dict (as used by all
        # other link_* helpers in this module).
        # NOTE(review): other upload admins filter by 'linked_object_id' — confirm
        # 'projecttype_id' is a valid changelist filter for UploadedAsset.
        return admin_changelist_url('Uploaded assets', 'pentests', 'uploadedasset', {'projecttype_id': obj.id})
|
||||
|
||||
@admin.register(ProjectMemberRole)
class ProjectMemberRoleAdmin(BaseAdmin):
    """Admin for project member roles."""
    list_display = ['role', 'default']
|
||||
|
||||
class ProjectMemberInfoInlineAdmin(admin.StackedInline):
    """Inline editor for project members, embedded in PentestProjectAdmin."""
    model = ProjectMemberInfo
|
||||
|
||||
@admin.register(PentestProject)
class PentestProjectAdmin(BaseAdmin):
    """Admin for projects with inline members and cross-links to related objects."""
    inlines = [ProjectMemberInfoInlineAdmin]

    def link_findings(self, obj):
        return admin_changelist_url('Findings of this project', 'pentests', 'pentestfinding', {'project_id': obj.id})

    def link_project_type(self, obj):
        return admin_change_url(obj.project_type.name, 'pentests', 'projecttype', obj.project_type.id)

    def link_uploaded_images(self, obj):
        return admin_changelist_url('Uploaded images', 'pentests', 'uploadedimage', {'linked_object_id': obj.id})

    def link_notes(self, obj):
        return admin_changelist_url('Notebook pages', 'pentests', 'notebookpage', {'project_id': obj.id})

    def link_uploaded_files(self, obj):
        return admin_changelist_url('Uploaded files', 'pentests', 'uploadedprojectfile', {'linked_object_id': obj.id})
|
||||
|
||||
@admin.register(PentestFinding)
class PentestFindingAdmin(BaseAdmin):
    """Admin for findings with a link to the owning project."""
    def link_project(self, obj):
        return admin_change_url(obj.project.name, 'pentests', 'pentestproject', obj.project.id)
|
||||
|
||||
@admin.register(ReportSection)
class ReportSectionAdmin(BaseAdmin):
    """Admin for report sections with a link to the owning project."""
    def link_project(self, obj):
        return admin_change_url(obj.project.name, 'pentests', 'pentestproject', obj.project.id)
|
||||
|
||||
@admin.register(NotebookPage)
class NotebookPageAdmin(BaseAdmin):
    """Admin for notebook pages; pages belong to either a project or a user."""
    def link_project(self, obj):
        return admin_change_url(obj.project.name, 'pentests', 'pentestproject', obj.project.id)

    def link_user(self, obj):
        return admin_change_url(obj.user.name, 'users', 'pentestuser', obj.user.id)
||||
|
||||
|
||||
@admin.register(FindingTemplate)
class FindingTemplateAdmin(BaseAdmin):
    # Plain admin for finding templates; no customizations needed.
    pass
|
||||
|
||||
@admin.register(UploadedImage)
class UploadedImageAdmin(BaseAdmin):
    """Admin for uploaded images; linked_object is the owning project."""
    def link_project(self, obj):
        return admin_change_url(obj.linked_object.name, 'pentests', 'pentestproject', obj.linked_object.id)
|
||||
|
||||
@admin.register(UploadedAsset)
class UploadedAssetAdmin(BaseAdmin):
    """Admin for uploaded design assets; linked_object is the owning project type."""
    def link_project_type(self, obj):
        return admin_change_url(obj.linked_object.name, 'pentests', 'projecttype', obj.linked_object.id)
|
||||
|
||||
@admin.register(UploadedUserNotebookImage)
class UploadedUserNotebookImageAdmin(BaseAdmin):
    """Admin for personal-notebook images; linked_object is the owning user."""
    def link_user(self, obj):
        return admin_change_url(obj.linked_object.name, 'users', 'pentestuser', obj.linked_object.id)
|
||||
|
||||
@admin.register(UploadedProjectFile)
class UploadedProjectFileAdmin(BaseAdmin):
    """Admin for uploaded project files; linked_object is the owning project."""
    def link_project(self, obj):
        return admin_change_url(obj.linked_object.name, 'pentests', 'pentestproject', obj.linked_object.id)
|
||||
|
||||
@admin.register(UserPublicKey)
class UserPublicKeyAdmin(BaseAdmin):
    """Admin for user public keys used in project archiving."""
    def link_user(self, obj):
        return admin_change_url(obj.user.username, 'users', 'pentestuser', obj.user.id)

    def link_encrypted_key_parts(self, obj):
        return admin_changelist_url('ArchivedProjectPublicKeyEncryptedKeyPart encrypted with this public key', 'pentests', 'archivedprojectpublickeyencryptedkeypart', {'public_key_id': obj.id})
||||
|
||||
|
||||
@admin.register(ArchivedProject)
class ArchivedProjectAdmin(BaseAdmin):
    """Admin for archived projects with a link to their key parts."""
    def link_key_parts(self, obj):
        return admin_changelist_url('key parts', 'pentests', 'archivedprojectkeypart', {'archived_project_id': obj.id})
||||
|
||||
|
||||
@admin.register(ArchivedProjectKeyPart)
class ArchivedProjectKeyPartAdmin(BaseAdmin):
    """Admin for archive key parts (4-eye principle), linking user, archive and encrypted data."""
    def link_user(self, obj):
        return admin_change_url(obj.user.username, 'users', 'pentestuser', obj.user.id)

    def link_archive(self, obj):
        return admin_change_url(obj.archived_project.name, 'pentests', 'archivedproject', obj.archived_project.id)

    def link_encrypted_key_parts(self, obj):
        return admin_changelist_url('Encrypted key part data', 'pentests', 'archivedprojectpublickeyencryptedkeypart', {'key_part_id': obj.id})
||||
|
||||
|
||||
@admin.register(ArchivedProjectPublicKeyEncryptedKeyPart)
class ArchivedProjectPublicKeyEncryptedKeyPartAdmin(BaseAdmin):
    """Admin for key parts encrypted with a user's public key."""
    def link_key_part(self, obj):
        return admin_change_url('Archive key part', 'pentests', 'archivedprojectkeypart', obj.key_part.id)

    def link_public_key(self, obj):
        return admin_change_url(obj.public_key.name, 'pentests', 'userpublickey', obj.public_key.id)
||||
|
|
@ -0,0 +1,10 @@
|
|||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class PentestsConfig(AppConfig):
    """App config for the pentests app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'reportcreator_api.pentests'

    def ready(self) -> None:
        # Imported for its side effects: registers signal receivers
        from . import signals  # noqa
||||
|
|
@ -0,0 +1,149 @@
|
|||
import itertools
|
||||
from typing import Any, Iterable, Union
|
||||
from reportcreator_api.pentests import cvss
|
||||
from reportcreator_api.pentests.customfields.types import FieldDataType
|
||||
from reportcreator_api.pentests.customfields.utils import iterate_fields
|
||||
from reportcreator_api.pentests.models import PentestFinding, PentestProject, ProjectType, ReportSection, ReviewStatus
|
||||
from reportcreator_api.utils.error_messages import ErrorMessage, MessageLevel, MessageLocationInfo, MessageLocationType
|
||||
|
||||
from reportcreator_api.utils.utils import find_all_indices
|
||||
|
||||
|
||||
class ReportCheck:
    """Base class for report quality checks.

    Subclasses override check_project/check_section/check_finding and yield
    ErrorMessages; check() aggregates the messages over a whole project.
    """

    def location_info(self, obj, path=None):
        """Build a MessageLocationInfo describing where a message originates.

        Raises ValueError for unsupported object types.
        """
        if isinstance(obj, PentestProject):
            return MessageLocationInfo(
                type=MessageLocationType.PROJECT,
                id=obj.id,
                name=obj.name,
            ).for_path(path)
        elif isinstance(obj, ReportSection):
            return MessageLocationInfo(
                type=MessageLocationType.SECTION,
                id=obj.section_id,
                name=obj.section_label,
            ).for_path(path)
        elif isinstance(obj, PentestFinding):
            return MessageLocationInfo(
                type=MessageLocationType.FINDING,
                id=obj.finding_id,
                name=obj.data.get('title'),
            ).for_path(path)
        elif isinstance(obj, ProjectType):
            return MessageLocationInfo(
                type=MessageLocationType.DESIGN,
                id=obj.id,
                name=obj.name,
            ).for_path(path)
        else:
            raise ValueError('Unsupported MessageLocationInfo')

    def check(self, project: PentestProject) -> Iterable[ErrorMessage]:
        """Run this check for the project, all its sections and all its findings."""
        return itertools.chain(
            self.check_project(project),
            *map(self.check_section, project.sections.all()),
            *map(self.check_finding, project.findings.all()),
        )

    def check_project(self, project: PentestProject) -> Iterable[ErrorMessage]:
        # Default: no project-level messages
        return []

    def check_section(self, section: ReportSection) -> Iterable[ErrorMessage]:
        # Default: no section-level messages
        return []

    def check_finding(self, finding: PentestFinding) -> Iterable[ErrorMessage]:
        # Default: no finding-level messages
        return []
||||
|
||||
|
||||
class TodoCheck(ReportCheck):
    """Warns about unresolved TODO markers left in report text fields."""

    # Lowercase marker spellings; matching is done case-insensitively (see
    # below), which also covers variants like "Todo" or "TO-Do" that the
    # previous explicit spelling list missed.
    TODO_MARKERS = ('todo', 'to-do')

    def check_todos_in_field(self, data: dict, definition: dict, location: MessageLocationInfo) -> Iterable[ErrorMessage]:
        """Yield one warning per string field that contains TODO markers."""
        for p, v, d in iterate_fields(value=data, definition=definition):
            if not isinstance(v, str):
                continue
            # Search a lowercased copy; str.lower() keeps indices aligned for
            # these ASCII markers (non-ASCII length changes are exotic enough
            # to ignore here).
            v_lower = v.lower()
            snippets = []
            # sorted() reports markers in document order regardless of spelling
            for idx in sorted(itertools.chain(*map(lambda s: find_all_indices(v_lower, s), self.TODO_MARKERS))):
                # Show the rest of the line containing the marker, truncated
                snippet = v[idx:].splitlines()[0]
                if len(snippet) > 100:
                    snippet = snippet[:100] + '...'
                snippets.append(snippet)
            if snippets:
                yield ErrorMessage(
                    level=MessageLevel.WARNING,
                    location=location.for_path(p),
                    message='Unresolved TODO',
                    details='\n'.join(snippets))

    def check_section(self, section) -> Iterable[ErrorMessage]:
        return self.check_todos_in_field(section.data, section.field_definition, self.location_info(section))

    def check_finding(self, finding) -> Iterable[ErrorMessage]:
        return self.check_todos_in_field(finding.data, finding.field_definition, self.location_info(finding))
|
||||
|
||||
class EmptyFieldsCheck(ReportCheck):
    """Warns about required fields that have no content."""

    def check_field(self, data: dict, definition: dict, location: MessageLocationInfo) -> Iterable[ErrorMessage]:
        for path, value, field_def in iterate_fields(value=data, definition=definition):
            # Fields without a 'required' attribute are treated as optional
            is_required = getattr(field_def, 'required', False)
            is_empty = value is None or value == '' or value == []
            if is_required and is_empty:
                yield ErrorMessage(
                    level=MessageLevel.WARNING,
                    location=location.for_path(path),
                    message='Empty field',
                )

    def check_section(self, section) -> Iterable[ErrorMessage]:
        return self.check_field(section.data, section.field_definition, self.location_info(section))

    def check_finding(self, finding) -> Iterable[ErrorMessage]:
        return self.check_field(finding.data, finding.field_definition, self.location_info(finding))
||||
|
||||
|
||||
class StatusCheck(ReportCheck):
    """Warns about sections and findings whose review status is not "finished"."""

    def check_status(self, obj: Union[ReportSection, PentestFinding]):
        if obj.status != ReviewStatus.FINISHED:
            yield ErrorMessage(
                level=MessageLevel.WARNING,
                location=self.location_info(obj=obj),
                message=f'Status is not "{ReviewStatus.FINISHED}"',
                details=f'Status is "{obj.status}", not status "{ReviewStatus.FINISHED}"',
            )

    def check(self, project: PentestProject) -> Iterable[ErrorMessage]:
        # If all findings and sections have status "in-progress", deactivate this check.
        # We assume that the users of the project do not use the review feature and statuses.
        # This removes unnecessary (and ignored) warnings if no statuses are used.
        if any(map(lambda s: s.status != ReviewStatus.IN_PROGRESS, project.sections.all())) or \
           any(map(lambda f: f.status != ReviewStatus.IN_PROGRESS, project.findings.all())):
            return super().check(project)
        else:
            return []

    def check_section(self, section: ReportSection) -> Iterable[ErrorMessage]:
        return self.check_status(section)

    def check_finding(self, finding: PentestFinding) -> Iterable[ErrorMessage]:
        return self.check_status(finding)
||||
|
||||
|
||||
class CvssFieldCheck(ReportCheck):
    """Warns about CVSS fields containing neither a valid vector nor the "n/a" marker."""

    def check_finding(self, finding) -> Iterable[ErrorMessage]:
        for path, value, field_def in iterate_fields(value=finding.data, definition=finding.field_definition):
            # Empty values and the explicit "n/a" marker are acceptable
            if field_def.type == FieldDataType.CVSS and not cvss.is_cvss(value) and value not in (None, '', 'n/a'):
                yield ErrorMessage(
                    level=MessageLevel.WARNING,
                    location=self.location_info(obj=finding, path=path),
                    message='Invalid CVSS vector',
                    details=f'"{value}" is not a valid CVSS vector. Enter "n/a" when no CVSS vector is applicable.'
                )
||||
|
||||
|
||||
def run_checks(project) -> Iterable[ErrorMessage]:
    """Run all report checks against a project and collect their messages.

    A crashing checker must not break the caller: its exception is converted
    into an ERROR message. The results are consumed inside the try-block
    because the checks are generator-based — previously, checker.check()
    returned lazy generators and exceptions raised while iterating them
    (in list(itertools.chain(...))) escaped the except-clause.
    """
    messages = []
    for checker in [TodoCheck(), EmptyFieldsCheck(), CvssFieldCheck(), StatusCheck()]:
        try:
            messages.extend(checker.check(project))
        except Exception as ex:
            messages.append(ErrorMessage(
                level=MessageLevel.ERROR,
                location=MessageLocationInfo(type=MessageLocationType.OTHER),
                message='Error while checking data',
                details=str(ex),
            ))
    return messages
|
|
@ -0,0 +1,146 @@
|
|||
{
|
||||
"$id": "https://syslifters.com/reportcreator/fielddefinition.schema.json",
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"title": "Field Definition",
|
||||
"$defs": {
|
||||
"field_object": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z_][a-zA-Z0-9_]+$": {
|
||||
"$ref": "#/$defs/field_value",
|
||||
"required": ["type", "label"]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"field_value": {
|
||||
"type": "object",
|
||||
"required": ["type"],
|
||||
"properties": {
|
||||
"label": {
|
||||
"type": "string"
|
||||
},
|
||||
"origin": {
|
||||
"type": "string",
|
||||
"enum": ["core", "predefined", "custom"]
|
||||
}
|
||||
},
|
||||
"anyOf": [
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": ["string", "markdown", "cvss"]
|
||||
},
|
||||
"default": {
|
||||
"type": ["string", "null"]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "date"
|
||||
},
|
||||
"default": {
|
||||
"type": ["string", "null"],
|
||||
"validate": "date"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "number"
|
||||
},
|
||||
"default": {
|
||||
"type": ["number", "null"]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "boolean"
|
||||
},
|
||||
"default": {
|
||||
"type": ["boolean", "null"]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "user"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"required": ["choices"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "enum"
|
||||
},
|
||||
"choices": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["value", "label"],
|
||||
"properties": {
|
||||
"value": {
|
||||
"type": "string",
|
||||
"pattern": "^[a-zA-Z_][0-9a-zA-Z_-]+$"
|
||||
},
|
||||
"label": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"default": {
|
||||
"type": ["string", "null"]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"required": ["suggestions"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "combobox"
|
||||
},
|
||||
"suggestions": {
|
||||
"type": "array",
|
||||
"minItems": 0,
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"required": ["properties"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "object"
|
||||
},
|
||||
"properties": {
|
||||
"$ref": "#/$defs/field_object"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"required": ["items"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "list"
|
||||
},
|
||||
"items": {
|
||||
"$ref": "#/$defs/field_value"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"$ref": "#/$defs/field_object"
|
||||
}
|
|
@ -0,0 +1,69 @@
|
|||
from django.db import models
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from reportcreator_api.archive.crypto.fields import EncryptedField
|
||||
from reportcreator_api.pentests.customfields.types import FieldDefinition
|
||||
|
||||
from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure
|
||||
from reportcreator_api.pentests.customfields.validators import FieldValuesValidator
|
||||
from reportcreator_api.utils.utils import copy_keys, omit_keys, merge
|
||||
|
||||
|
||||
class CustomFieldsMixin(models.Model):
    """Mixin merging concrete model columns ("core" fields) with a JSON blob of custom fields."""

    custom_fields = models.JSONField(encoder=DjangoJSONEncoder, default=dict)

    class Meta:
        abstract = True

    @property
    def field_definition(self) -> dict[str, FieldDefinition]:
        """Field definitions for this model; overridden by concrete models."""
        return {}

    @property
    def core_field_names(self) -> list[str]:
        """Names of fields stored as regular model attributes instead of in custom_fields."""
        return []

    @property
    def data(self) -> dict:
        """
        Return a dict of all field values.
        Sets default values, if a field is not defined.
        Does not include data of undefined fields not present in the definition.
        """
        return self.get_data()

    @property
    def data_all(self) -> dict:
        """Like :attr:`data`, but also keeps values of undefined fields."""
        return self.get_data(include_undefined=True)

    def get_data(self, handle_undefined=HandleUndefinedFieldsOptions.FILL_NONE, include_undefined=False) -> dict:
        # Collect all current values: core fields live directly on the model
        # instance, everything else in the custom_fields dict.
        current = dict(self.custom_fields)
        current |= {name: getattr(self, name) for name in self.core_field_names}

        # Recursively check for undefined fields and set a default value.
        return ensure_defined_structure(
            value=current,
            definition=self.field_definition,
            handle_undefined=handle_undefined,
            include_undefined=include_undefined)

    def update_data(self, value):
        # Merge with previous custom data
        merged = merge(self.data, value)

        # Validate data against the field definition
        FieldValuesValidator(self.field_definition)(merged)

        # Distribute values: core fields to model attributes, the rest to custom_fields
        for name, field_value in copy_keys(merged, self.core_field_names).items():
            setattr(self, name, field_value)
        self.custom_fields = self.custom_fields | omit_keys(merged, self.core_field_names)
|
||||
|
||||
|
||||
class EncryptedCustomFieldsMixin(CustomFieldsMixin):
    # Same behavior as CustomFieldsMixin, but custom_fields are encrypted at rest.
    custom_fields = EncryptedField(base_field=models.JSONField(encoder=DjangoJSONEncoder, default=dict))

    class Meta(CustomFieldsMixin.Meta):
        abstract = True
|
||||
|
|
@ -0,0 +1,121 @@
|
|||
from reportcreator_api.pentests.customfields.types import CvssField, EnumChoice, EnumField, FieldOrigin, ListField, MarkdownField, ObjectField, StringField, DateField, UserField, BooleanField, field_definition_to_dict
|
||||
from reportcreator_api.utils.utils import copy_keys
|
||||
|
||||
|
||||
# Core fields are the minimum required fields.
# These fields are required internally and cannot be removed or changed.
FINDING_FIELDS_CORE = {
    'title': StringField(origin=FieldOrigin.CORE, label='Title', spellcheck=True, default='TODO: Finding Title'),
    'cvss': CvssField(origin=FieldOrigin.CORE, label='CVSS', default='n/a'),
}
# Predefined fields are optional fields with well-known names and semantics
# that designs may opt into; unlike core fields they can be left out.
FINDING_FIELDS_PREDEFINED = {
    'summary': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Summary', required=True, default='TODO: High-level summary'),
    'description': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Technical Description', required=True, default='TODO: detailed technical description what this findings is about and how it can be exploited'),
    'precondition': StringField(origin=FieldOrigin.PREDEFINED, label='Precondition', required=True, spellcheck=True, default=None),
    'impact': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Impact', required=True, default='TODO: impact of finding'),
    'recommendation': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Recommendation', required=True, default='TODO: how to fix the vulnerability'),
    'short_recommendation': StringField(origin=FieldOrigin.PREDEFINED, label='Short Recommendation', required=True, spellcheck=True, default='TODO: short recommendation'),
    'references': ListField(origin=FieldOrigin.PREDEFINED, label='References', required=False,
                            items=StringField(origin=FieldOrigin.PREDEFINED, label='Reference', default=None)),
    'affected_components': ListField(origin=FieldOrigin.PREDEFINED, label='Affected Components', required=True,
                                     items=StringField(origin=FieldOrigin.PREDEFINED, label='Component', default='TODO: affected component')),
    'owasp_top10_2021': EnumField(origin=FieldOrigin.PREDEFINED, label='OWASP Top 10 - 2021', required=True, default=None, choices=[
        EnumChoice(value='A01_2021', label='A01:2021 - Broken Access Control'),
        EnumChoice(value='A02_2021', label='A02:2021 - Cryptographic Failures'),
        EnumChoice(value='A03_2021', label='A03:2021 - Injection'),
        EnumChoice(value='A04_2021', label='A04:2021 - Insecure Design'),
        EnumChoice(value='A05_2021', label='A05:2021 - Security Misconfiguration'),
        EnumChoice(value='A06_2021', label='A06:2021 - Vulnerable and Outdated Components'),
        EnumChoice(value='A07_2021', label='A07:2021 - Identification and Authentication Failures'),
        EnumChoice(value='A08_2021', label='A08:2021 - Software and Data Integrity Failures'),
        EnumChoice(value='A09_2021', label='A09:2021 - Security Logging and Monitoring Failures'),
        EnumChoice(value='A10_2021', label='A10:2021 - Server-Side Request Forgery (SSRF)'),
    ]),
    'wstg_category': EnumField(origin=FieldOrigin.PREDEFINED, label='OWASP Web Security Testing Guide Category', required=True, default=None, choices=[
        EnumChoice(value='INFO', label='INFO - Information Gathering'),
        EnumChoice(value='CONF', label='CONF - Configuration and Deployment Management'),
        EnumChoice(value='IDNT', label='IDNT - Identity Management'),
        EnumChoice(value='ATHN', label='ATHN - Authentication'),
        EnumChoice(value='ATHZ', label='ATHZ - Authorization'),
        EnumChoice(value='SESS', label='SESS - Session Management'),
        EnumChoice(value='INPV', label='INPV - Input Validation'),
        EnumChoice(value='ERRH', label='ERRH - Error Handling'),
        EnumChoice(value='CRYP', label='CRYP - Weak Cryptography'),
        EnumChoice(value='BUSL', label='BUSL - Business Logic'),
        EnumChoice(value='CLNT', label='CLNT - Client-side Testing'),
        EnumChoice(value='APIT', label='APIT - API Testing'),
    ]),

    # Fields used for re-test reports
    'retest_notes': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Re-test Notes', required=False, default=None),
    'retest_status': EnumField(origin=FieldOrigin.PREDEFINED, label='Re-test Status', required=False, default=None, choices=[
        EnumChoice(value='open', label='Open'),
        EnumChoice(value='resolved', label='Resolved'),
        EnumChoice(value='partial', label='Partially Resolved'),
        EnumChoice(value='changed', label='Changed'),
        EnumChoice(value='accepted', label='Accepted'),
        EnumChoice(value='new', label='New'),
    ]),
}

# Core/predefined fields for report sections, analogous to the finding fields above.
REPORT_FIELDS_CORE = {
    'title': StringField(origin=FieldOrigin.CORE, label='Title', required=True, spellcheck=True, default='TODO: Report Title'),
}
REPORT_FIELDS_PREDEFINED = {
    'is_retest': BooleanField(origin=FieldOrigin.PREDEFINED, label='Is Retest', default=False),
}
|
||||
|
||||
|
||||
def finding_fields_default():
    """Default finding field definition for new designs, serialized to a plain dict."""
    predefined = copy_keys(FINDING_FIELDS_PREDEFINED, [
        'summary', 'description', 'impact', 'recommendation', 'affected_components', 'references',
    ])
    overrides = {
        'short_recommendation': StringField(label='Short Recommendation', required=True, default='TODO: short recommendation'),
    }
    return field_definition_to_dict(FINDING_FIELDS_CORE | predefined | overrides)
|
||||
|
||||
|
||||
def finding_field_order_default():
    """Default ordering of finding fields in the editor UI."""
    return [
        'title',
        'cvss',
        'affected_components',
        'summary',
        'short_recommendation',
        'description',
        'impact',
        'recommendation',
        'references',
    ]
|
||||
|
||||
|
||||
|
||||
def report_fields_default():
    """Default report field definition for new designs, serialized to a plain dict."""
    default_fields = {
        'scope': MarkdownField(label='Scope', required=True, default='TODO: The scope of this pentest included:\n* Active Directory Domain xyz\n* Internal server network 10.20.30.40/24'),
        'executive_summary': MarkdownField(label='Executive Summary', required=True, default='**TODO: write executive summary**'),
        'customer': StringField(label='Customer', required=True, default='TODO company'),
        'duration': StringField(label='Duration', required=True, default='TODO person days'),
        'start_date': DateField(label='Pentest Start Date', required=True, default=None),
        'end_date': DateField(label='Pentest End Date', required=True, default=None),
        'document_history': ListField(label='Document History', required=True, items=ObjectField(properties={
            'version': StringField(label='Version', required=True, default='TODO: 1.0'),
            'date': DateField(label='Date', required=True, default=None),
            'description': StringField(label='Description', required=True, default='TODO: description'),
            'authors': ListField(label='Authors', required=True, items=UserField(required=True)),
        })),
    }
    return field_definition_to_dict(REPORT_FIELDS_CORE | default_fields)
|
||||
|
||||
|
||||
def report_sections_default():
    """Default grouping of report fields into sections.

    Fields not referenced by any section are collected in an implicit "other" section.
    """
    sections = []
    sections.append({
        'id': 'executive_summary',
        'label': 'Executive Summary',
        'fields': ['executive_summary'],
    })
    sections.append({
        'id': 'scope',
        'label': 'Scope',
        'fields': ['scope', 'duration', 'start_date', 'end_date'],
    })
    sections.append({
        'id': 'customer',
        'label': 'Customer',
        'fields': ['customer'],
    })
    return sections
|
|
@ -0,0 +1,27 @@
|
|||
{
|
||||
"$id": "https://syslifters.com/reportcreator/sectiondefinition.schema.json",
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"title": "Section Definition",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["id", "label", "fields"],
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"pattern": "^[a-zA-Z0-9_-]+$",
|
||||
"maxLength": 255
|
||||
},
|
||||
"label": {
|
||||
"type": "string"
|
||||
},
|
||||
"fields": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"minItems": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,74 @@
|
|||
import datetime
|
||||
from typing import OrderedDict
|
||||
from uuid import UUID
|
||||
from rest_framework import serializers
|
||||
from reportcreator_api.pentests.customfields.types import FieldDataType, FieldDefinition
|
||||
from reportcreator_api.users.models import PentestUser
|
||||
|
||||
|
||||
class DynamicObjectSerializer(serializers.Serializer):
    # Serializer whose fields are supplied at runtime via the "fields" kwarg
    # instead of being declared on the class.
    def __init__(self, *args, **kwargs):
        # Shadows the class-level _declared_fields cache with the per-instance
        # field dict; DRF builds self.fields lazily from _declared_fields.
        self._declared_fields = kwargs.pop('fields', {})
        super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class DateField(serializers.DateField):
    """DateField that stores validated dates as ISO-8601 strings instead of date objects."""

    def to_internal_value(self, value):
        parsed = super().to_internal_value(value)
        # Keep JSON-serializable strings; pass through anything DRF did not parse to a date.
        return parsed.isoformat() if isinstance(parsed, datetime.date) else parsed
|
||||
|
||||
|
||||
class UserField(serializers.PrimaryKeyRelatedField):
    # Resolves user references to user-ID strings. Accepts both real PentestUser
    # primary keys and IDs of "imported members" stored on the project in context.
    queryset = PentestUser.objects.all()

    def to_internal_value(self, data):
        # Imported project members are not real PentestUser rows; if the given ID
        # matches one of them, keep the ID as-is instead of querying the database.
        if isinstance(data, (str, UUID)) and (project := self.context.get('project')) and \
            project.imported_members and (imported_user := next(filter(lambda u: data == u.get('id'), project.imported_members), None)):
            return imported_user.get('id')

        # Fall back to the regular PK lookup; normalize the result to a string ID.
        user = super().to_internal_value(data)
        return str(user.id) if isinstance(user, PentestUser) else user

    def to_representation(self, value):
        # Values may already be plain IDs (e.g. imported members); pass them through.
        if isinstance(value, (str, UUID)):
            return value
        return super().to_representation(value)
|
||||
|
||||
|
||||
def serializer_from_definition(definition: dict[str, FieldDefinition], **kwargs):
    """Build a DynamicObjectSerializer with one serializer field per definition entry."""
    serializer_fields = {}
    for name, field_definition in definition.items():
        serializer_field = serializer_from_field(field_definition)
        if serializer_field is not None:
            serializer_fields[name] = serializer_field
    return DynamicObjectSerializer(fields=serializer_fields, **kwargs)
|
||||
|
||||
|
||||
def serializer_from_field(definition):
    """Create the DRF serializer field matching a single field definition.

    Raises ValueError for unknown field types.
    """
    base_kwargs = {
        'label': definition.label,
        'required': False,
    }
    # Scalar value fields additionally accept null; container fields (object, list)
    # only take the base kwargs.
    scalar_kwargs = base_kwargs | {
        'allow_null': True,
    }

    data_type = definition.type
    if data_type in [FieldDataType.STRING, FieldDataType.MARKDOWN, FieldDataType.CVSS, FieldDataType.COMBOBOX]:
        return serializers.CharField(trim_whitespace=False, allow_blank=True, **scalar_kwargs)
    if data_type == FieldDataType.DATE:
        return DateField(**scalar_kwargs)
    if data_type == FieldDataType.NUMBER:
        return serializers.FloatField(**scalar_kwargs)
    if data_type == FieldDataType.BOOLEAN:
        return serializers.BooleanField(**scalar_kwargs)
    if data_type == FieldDataType.ENUM:
        return serializers.ChoiceField(choices=[c.value for c in definition.choices], **scalar_kwargs)
    if data_type == FieldDataType.USER:
        return UserField(**scalar_kwargs)
    if data_type == FieldDataType.OBJECT:
        return serializer_from_definition(definition.properties, **base_kwargs)
    if data_type == FieldDataType.LIST:
        return serializers.ListField(child=serializer_from_field(definition.items), allow_empty=True, **base_kwargs)
    raise ValueError(f'Encountered unsupported type in field definition: "{data_type}"')
|
|
@ -0,0 +1,219 @@
|
|||
import dataclasses
|
||||
import enum
|
||||
from datetime import date
|
||||
from importlib.metadata import requires
|
||||
from inspect import isclass
|
||||
from types import GenericAlias
|
||||
from typing import Any, Optional, Union
|
||||
from django.utils.deconstruct import deconstructible
|
||||
from reportcreator_api.utils.utils import is_date_string
|
||||
|
||||
|
||||
@enum.unique
class FieldDataType(enum.Enum):
    # All data types a field definition can have.
    STRING = 'string'
    MARKDOWN = 'markdown'
    CVSS = 'cvss'
    DATE = 'date'
    NUMBER = 'number'
    BOOLEAN = 'boolean'
    ENUM = 'enum'
    COMBOBOX = 'combobox'
    USER = 'user'
    OBJECT = 'object'
    LIST = 'list'
|
||||
|
||||
|
||||
@enum.unique
class FieldOrigin(enum.Enum):
    # Where a field definition comes from: built-in core fields,
    # optional predefined fields, or user-defined custom fields.
    CORE = 'core'
    PREDEFINED = 'predefined'
    CUSTOM = 'custom'
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class FieldDefinition:
    # Base class for all field definitions; `type` is overridden by subclasses.
    type: FieldDataType = None
    label: str = ''
    origin: FieldOrigin = FieldOrigin.CUSTOM
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class BaseStringField(FieldDefinition):
    # Shared base for string-like field types (string, markdown, cvss, combobox).
    default: Optional[str] = None
    required: bool = True
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class StringField(BaseStringField):
    # Single-line string field; spellcheck can be enabled per field.
    spellcheck: bool = False
    type: FieldDataType = FieldDataType.STRING
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class MarkdownField(BaseStringField):
    # Multi-line markdown text field.
    type: FieldDataType = FieldDataType.MARKDOWN
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class CvssField(BaseStringField):
    # Field holding a CVSS vector string (or "n/a").
    type: FieldDataType = FieldDataType.CVSS
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class ComboboxField(BaseStringField):
    # Free-text field with a list of suggested values.
    type: FieldDataType = FieldDataType.COMBOBOX
    suggestions: list[str] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class DateField(FieldDefinition):
    # Date field; values and default are ISO-8601 date strings, not date objects.
    default: Optional[str] = None
    required: bool = True
    type: FieldDataType = FieldDataType.DATE

    def __post_init__(self):
        # Reject defaults that are not parseable ISO date strings.
        if self.default and not is_date_string(self.default):
            raise ValueError('Default value is not a date', self.default)
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class EnumChoice:
    # One selectable choice of an EnumField.
    value: str
    label: str = None

    def __post_init__(self):
        # Fall back to the raw value when no human-readable label is given.
        self.label = self.value if not self.label else self.label
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class EnumField(FieldDefinition):
    # Single-select field with a fixed list of choices.
    choices: list[EnumChoice] = dataclasses.field(default_factory=list)
    default: Optional[str] = None
    required: bool = True
    type: FieldDataType = FieldDataType.ENUM

    def __post_init__(self):
        # The default (if any) must be one of the declared choice values.
        if self.default and self.default not in {c.value for c in self.choices}:
            raise ValueError('Default value is not a valid enum choice', self.default)
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class NumberField(FieldDefinition):
    # Numeric field; accepts int or float values.
    default: Optional[Union[float, int]] = None
    required: bool = True
    type: FieldDataType = FieldDataType.NUMBER
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class BooleanField(FieldDefinition):
    # Boolean checkbox field.
    default: Optional[bool] = None
    type: FieldDataType = FieldDataType.BOOLEAN
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class UserField(FieldDefinition):
    # Reference to a user; values are user IDs.
    required: bool = True
    type: FieldDataType = FieldDataType.USER
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class ObjectField(FieldDefinition):
    # Nested object with its own named sub-field definitions.
    properties: dict[str, FieldDefinition] = dataclasses.field(default_factory=dict)
    type: FieldDataType = FieldDataType.OBJECT
|
||||
|
||||
|
||||
@deconstructible
@dataclasses.dataclass
class ListField(FieldDefinition):
    # Homogeneous list; `items` defines the element type.
    items: FieldDefinition = None
    required: bool = True
    type: FieldDataType = FieldDataType.LIST
|
||||
|
||||
|
||||
# Maps each FieldDataType to the dataclass used to parse its definition.
_FIELD_DATA_TYPE_CLASSES_MAPPING = {
    FieldDataType.STRING: StringField,
    FieldDataType.MARKDOWN: MarkdownField,
    FieldDataType.CVSS: CvssField,
    FieldDataType.DATE: DateField,
    FieldDataType.NUMBER: NumberField,
    FieldDataType.BOOLEAN: BooleanField,
    FieldDataType.ENUM: EnumField,
    FieldDataType.COMBOBOX: ComboboxField,
    FieldDataType.USER: UserField,
    FieldDataType.OBJECT: ObjectField,
    FieldDataType.LIST: ListField,
}
|
||||
|
||||
|
||||
def _field_from_dict(t: type, v: Union[dict, str, Any]) -> FieldDefinition:
|
||||
if isinstance(t, GenericAlias):
|
||||
if t.__origin__ is list and isinstance(v, list):
|
||||
return [_field_from_dict(t.__args__[0], e) for e in v]
|
||||
elif t.__origin__ is dict and isinstance(v, dict):
|
||||
return {_field_from_dict(t.__args__[0], k): _field_from_dict(t.__args__[1], e) for k, e in v.items()}
|
||||
elif isinstance(v, t):
|
||||
return v
|
||||
elif isclass(t) and issubclass(t, enum.Enum):
|
||||
return t(v)
|
||||
elif isinstance(t, date) and isinstance(v, str):
|
||||
return date.fromisoformat(v)
|
||||
elif dataclasses.is_dataclass(t) and isinstance(v, dict):
|
||||
field_types = {f.name: f.type for f in dataclasses.fields(t)}
|
||||
dataclass_args = {f: _field_from_dict(field_types[f], v[f]) for f in field_types if f in v}
|
||||
try:
|
||||
return t(**dataclass_args)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
raise ValueError('Could not decode field definition', v)
|
||||
|
||||
|
||||
def _parse_field_definition_entry(definition: dict) -> FieldDefinition:
    """Parse a single raw field definition dict into its typed dataclass."""
    if 'type' not in definition:
        raise ValueError('Field type missing')

    data_type = FieldDataType(definition['type'])
    parsed = _field_from_dict(_FIELD_DATA_TYPE_CLASSES_MAPPING[data_type], definition)

    # Container types need explicit recursive parsing of their nested definitions.
    if data_type == FieldDataType.OBJECT:
        parsed.properties = parse_field_definition(definition.get('properties', {}))
    elif data_type == FieldDataType.LIST:
        parsed.items = _parse_field_definition_entry(definition.get('items', {}))

    return parsed
|
||||
|
||||
|
||||
def parse_field_definition(definition: dict) -> dict[str, FieldDefinition]:
    """Parse a dict of raw field definitions into FieldDefinition dataclasses."""
    return {name: _parse_field_definition_entry(entry) for name, entry in definition.items()}
|
||||
|
||||
|
||||
def field_definition_to_dict(definition: 'Union[dict[str, FieldDefinition], Any]'):
    """
    Recursively convert a field definition (dataclasses, enums, dates) into
    plain JSON-serializable dicts/lists/values.
    """
    if isinstance(definition, dict):
        return {k: field_definition_to_dict(v) for k, v in definition.items()}
    elif isinstance(definition, list):
        return [field_definition_to_dict(e) for e in definition]
    elif dataclasses.is_dataclass(definition):
        return field_definition_to_dict(dataclasses.asdict(definition))
    elif isinstance(definition, enum.Enum):
        return definition.value
    elif isinstance(definition, date):
        # Bugfix: was `date.isoformat()` — an unbound call on the class that raised
        # TypeError for every date value instead of serializing it.
        return definition.isoformat()
    else:
        return definition
|
|
@ -0,0 +1,186 @@
|
|||
import dataclasses
|
||||
import enum
|
||||
import random
|
||||
from lorem_text import lorem
|
||||
from typing import Any, Iterable, Optional, Union, OrderedDict
|
||||
from django.utils import timezone
|
||||
|
||||
from reportcreator_api.pentests.customfields.types import FieldDataType, FieldDefinition, FieldOrigin
|
||||
from reportcreator_api.utils.utils import is_date_string, is_uuid
|
||||
from reportcreator_api.utils.error_messages import format_path
|
||||
|
||||
|
||||
def contains(a, b):
    """
    Check recursively whether dict *a* contains all keys and values of dict *b*.

    Raises ValueError for list/tuple values (diffing lists is unsupported).
    """
    if not b:
        return True

    if type(a) != type(b):
        return False

    for k, v in b.items():
        if k not in a:
            return False
        if isinstance(v, dict):
            if not contains(a[k], v):
                return False
        elif isinstance(v, (list, tuple)):
            raise ValueError('Cannot diff lists')
        elif a[k] != v:
            # Bugfix: previously compared `v != b[k]` (a value against itself, always
            # equal), so differing leaf values were never detected.
            return False
    return True
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def has_field_structure_changed(old: dict[str, FieldDefinition], new: dict[str, FieldDefinition]):
    """
    Return True when *new* differs structurally from *old*: fields added or
    removed, a field's type changed, nested object/list structure changed,
    or an existing enum choice removed.
    """
    if old.keys() != new.keys():
        return True

    for name, old_def in old.items():
        new_def = new[name]
        if old_def.type != new_def.type:
            return True
        if old_def.type == FieldDataType.OBJECT and has_field_structure_changed(old_def.properties, new_def.properties):
            return True
        if old_def.type == FieldDataType.LIST and has_field_structure_changed({'items': old_def.items}, {'items': new_def.items}):
            return True
        if old_def.type == FieldDataType.ENUM and \
                {c.value for c in old_def.choices} - {c.value for c in new_def.choices}:
            # An existing enum choice was removed
            return True

    return False
|
||||
|
||||
|
||||
class HandleUndefinedFieldsOptions(enum.Enum):
    # Strategy for values missing from the data: fill with None,
    # with the definition's default, or with generated demo data.
    FILL_NONE = 'fill_none'
    FILL_DEFAULT = 'fill_default'
    FILL_DEMO_DATA = 'fill_demo_data'
|
||||
|
||||
|
||||
def _default_or_demo_data(definition: FieldDefinition, demo_data: Any, handle_undefined: HandleUndefinedFieldsOptions):
    """Pick a replacement value for an undefined field based on the fill strategy."""
    if handle_undefined == HandleUndefinedFieldsOptions.FILL_DEMO_DATA:
        return definition.default or demo_data
    if handle_undefined == HandleUndefinedFieldsOptions.FILL_DEFAULT:
        return definition.default
    # FILL_NONE (and any unknown strategy) falls back to None.
    return None
|
||||
|
||||
|
||||
def ensure_defined_structure(value, definition: Union[dict[str, FieldDefinition], FieldDefinition], handle_undefined: HandleUndefinedFieldsOptions = HandleUndefinedFieldsOptions.FILL_DEFAULT, include_undefined=False):
    """
    Ensure that the returned data is valid for the given field definition.
    Recursively check for undefined fields and set a value.
    Returns only data of defined fields, if value contains undefined fields, this data is not returned.
    """
    if isinstance(definition, dict):
        # NOTE(review): with include_undefined=True the caller's dict is reused and
        # mutated in place; include_undefined is also not forwarded to recursive
        # calls, so undefined data survives only at the top level — confirm intended.
        out = value if include_undefined else {}
        for k, d in definition.items():
            out[k] = ensure_defined_structure(value=(value if isinstance(value, dict) else {}).get(k), definition=d, handle_undefined=handle_undefined)
        return out
    else:
        if definition.type == FieldDataType.OBJECT:
            return ensure_defined_structure(value, definition.properties, handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.LIST:
            if isinstance(value, list):
                return [ensure_defined_structure(value=e, definition=definition.items, handle_undefined=handle_undefined) for e in value]
            else:
                # Demo data: generate two example items, except for user references.
                if handle_undefined == HandleUndefinedFieldsOptions.FILL_DEMO_DATA and definition.items.type != FieldDataType.USER:
                    return [ensure_defined_structure(value=None, definition=definition.items, handle_undefined=handle_undefined) for _ in range(2)]
                else:
                    return []
        # Scalar types: replace invalid/missing values per the fill strategy.
        elif definition.type == FieldDataType.MARKDOWN and not isinstance(value, str):
            return _default_or_demo_data(definition, lorem.paragraphs(3), handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.STRING and not isinstance(value, str):
            return _default_or_demo_data(definition, lorem.words(2), handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.CVSS and not isinstance(value, str):
            return _default_or_demo_data(definition, 'n/a', handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.ENUM and not (isinstance(value, str) and value in {c.value for c in definition.choices}):
            # Demo data: the first declared choice, if any.
            return _default_or_demo_data(definition, next(iter(map(lambda c: c.value, definition.choices)), None), handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.COMBOBOX and not isinstance(value, str):
            return _default_or_demo_data(definition, next(iter(definition.suggestions), None), handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.DATE and not (isinstance(value, str) and is_date_string(value)):
            return _default_or_demo_data(definition, timezone.now().date().isoformat(), handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.NUMBER and not isinstance(value, (int, float)):
            return _default_or_demo_data(definition, random.randint(1, 10), handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.BOOLEAN and not isinstance(value, bool):
            return _default_or_demo_data(definition, random.choice([True, False]), handle_undefined=handle_undefined)
        elif definition.type == FieldDataType.USER and not (isinstance(value, str) or is_uuid(value)):
            # User references cannot be invented; invalid/missing users become None.
            return None
        else:
            return value
|
||||
|
||||
|
||||
def check_definitions_compatible(a: Union[dict[str, FieldDefinition], FieldDefinition], b: Union[dict[str, FieldDefinition], FieldDefinition], path: Optional[tuple[str]] = None) -> tuple[bool, list[str]]:
    """
    Check if definitions are compatible and values can be converted without data loss.
    Returns a (valid, errors) tuple; *valid* is True iff no errors were found.
    """
    if path is None:
        path = tuple()
    errors = []

    if isinstance(a, dict) and isinstance(b, dict):
        # Only fields present in both definitions need to be compatible.
        for key in set(a.keys()).intersection(b.keys()):
            _, sub_errors = check_definitions_compatible(a[key], b[key], path=path + (key,))
            errors.extend(sub_errors)
    elif isinstance(a, FieldDefinition) and isinstance(b, FieldDefinition):
        if a.type != b.type:
            errors.append(f'Field "{format_path(path)}" has different types: "{a.type.value}" vs. "{b.type.value}"')
        elif a.type == FieldDataType.LIST:
            _, sub_errors = check_definitions_compatible(a.items, b.items, path=path + ('[]',))
            errors.extend(sub_errors)
        elif a.type == FieldDataType.ENUM:
            # Choices present in a but missing in b would lose data.
            missing_choices = {c.value for c in a.choices} - {c.value for c in b.choices}
            if missing_choices:
                missing_choices_str = ', '.join(map(lambda c: f'"{c}"', missing_choices))
                errors.append(f'Field "{format_path(path)}" has missing enum choices: {missing_choices_str}')

    return not errors, errors
|
||||
|
||||
|
||||
def set_field_origin(definition: Union[dict[str, FieldDefinition], FieldDefinition], predefined_fields: Union[dict, FieldDefinition, None]):
    """
    Return a copy of *definition* with `origin` set recursively, taken from the
    matching entry in *predefined_fields* (falling back to CUSTOM).
    """
    if isinstance(definition, dict):
        return {
            name: set_field_origin(d, predefined_fields=predefined_fields.get(name) if predefined_fields else None)
            for name, d in definition.items()
        }

    updated = dataclasses.replace(definition, origin=getattr(predefined_fields, 'origin', FieldOrigin.CUSTOM))

    # Recurse into nested structures.
    if updated.type == FieldDataType.OBJECT:
        updated.properties = set_field_origin(updated.properties, predefined_fields=getattr(predefined_fields, 'properties', None))
    elif updated.type == FieldDataType.LIST:
        updated.items = set_field_origin(updated.items, predefined_fields=getattr(predefined_fields, 'items', None))
    return updated
|
||||
|
||||
|
||||
def iterate_fields(value: Union[dict, Any], definition: Union[dict[str, FieldDefinition], FieldDefinition], path: Optional[tuple[str]] = None) -> Iterable[tuple[tuple[str], Any, FieldDefinition]]:
    """
    Depth-first generator over all defined fields.

    Yields ``(path, value, definition)`` triples. Object members contribute
    their property name to the path, list entries an ``[<index>]`` component.
    Values missing from ``value`` are yielded as ``None``.
    """
    if not definition:
        return
    path = path or tuple()

    if isinstance(definition, dict):
        # Top level: walk each named field together with its corresponding value.
        value_dict = value if isinstance(value, dict) else {}
        for name, field_definition in definition.items():
            yield from iterate_fields(value=value_dict.get(name), definition=field_definition, path=path + (name,))
        return

    # Emit the field itself before descending into nested structures.
    yield path, value, definition

    if definition.type == FieldDataType.OBJECT:
        yield from iterate_fields(value=value or {}, definition=definition.properties, path=path)
    elif definition.type == FieldDataType.LIST:
        for index, entry in enumerate(value if isinstance(value, list) else []):
            yield from iterate_fields(value=entry, definition=definition.items, path=path + (f'[{index}]',))
|
||||
|
|
@ -0,0 +1,136 @@
|
|||
import functools
|
||||
import itertools
|
||||
import json
|
||||
import jsonschema
|
||||
from pathlib import Path
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.deconstruct import deconstructible
|
||||
from reportcreator_api.pentests.customfields.types import FieldDataType, FieldDefinition, parse_field_definition
|
||||
|
||||
from reportcreator_api.pentests.customfields.utils import contains
|
||||
|
||||
|
||||
|
||||
@functools.cache
def get_field_definition_schema():
    """Load and cache the JSON schema validator for field definitions."""
    schema_file = Path(__file__).parent / 'fielddefinition.schema.json'
    return jsonschema.Draft202012Validator(schema=json.loads(schema_file.read_text()))
|
||||
|
||||
|
||||
@functools.cache
def get_section_definition_schema():
    """Load and cache the JSON schema validator for section definitions."""
    schema_file = Path(__file__).parent / 'sectiondefinition.schema.json'
    return jsonschema.Draft202012Validator(schema=json.loads(schema_file.read_text()))
|
||||
|
||||
|
||||
@deconstructible
class FieldDefinitionValidator:
    """
    Validates a field definition document: JSON schema conformance, presence
    of all core fields, and structural compatibility with core/predefined
    field definitions.
    """

    def __init__(self, core_fields=None, predefined_fields=None) -> None:
        self.core_fields = core_fields or {}
        self.predefined_fields = predefined_fields or {}

    def definition_contains(self, val: FieldDefinition, ref: FieldDefinition):
        """
        Recursively check whether the definition `ref` is included in `val`.

        `val` may extend the nested structure by adding fields, but it may
        neither remove fields nor change their data types.
        """
        if val.type != ref.type:
            return False
        if val.type == FieldDataType.LIST:
            return self.definition_contains(val.items, ref.items)
        if val.type == FieldDataType.OBJECT:
            # Every property of the reference must exist in val ...
            if set(ref.properties.keys()).difference(val.properties.keys()):
                return False
            # ... and recursively match the reference's structure.
            return all(self.definition_contains(val.properties[name], prop) for name, prop in ref.properties.items())
        return True

    def __call__(self, value: dict):
        try:
            get_field_definition_schema().validate(value)
        except jsonschema.ValidationError as ex:
            raise ValidationError('Invalid field definition') from ex

        parsed_value = parse_field_definition(value)

        # Core fields are required and their structure must stay unchanged;
        # only labels and default values may be customized.
        for name, field in self.core_fields.items():
            if name not in parsed_value:
                raise ValidationError(f'Core field "{name}" is required')
            if not self.definition_contains(parsed_value[name], field):
                raise ValidationError(f'Cannot change structure of core field "{name}"')

        # Predefined fields are optional; when present, their base structure
        # must be preserved but may be extended.
        for name, field in self.predefined_fields.items():
            if name in parsed_value and not self.definition_contains(parsed_value[name], field):
                raise ValidationError(f'Cannot change structure of predefined field "{name}"')
|
||||
|
||||
|
||||
@deconstructible
class FieldValuesValidator:
    """
    Validates user-provided field values against a field definition by
    compiling the definition into a JSON schema once at construction time.
    """

    def __init__(self, field_definitions: dict[str, FieldDefinition]) -> None:
        self.schema = self.compile_definition_to_schema(field_definitions)

    def compile_object(self, definition: dict) -> dict:
        """Compile a dict of field definitions into a JSON schema object node."""
        return {
            'type': 'object',
            # Allow extra keys so values may carry fields that are not (or no longer) defined.
            'additionalProperties': True,
            'properties': {name: self.compile_field(field) for name, field in definition.items()},
            'required': list(definition.keys()),
        }

    def compile_field(self, definition: FieldDefinition) -> dict:
        """
        Compile a single field definition into its JSON schema fragment.

        All scalar fields are nullable: a field may be present but not yet
        filled in.

        Raises:
            ValueError: if the definition contains an unknown data type.
        """
        field_type = definition.type
        if field_type in [FieldDataType.STRING, FieldDataType.MARKDOWN, FieldDataType.CVSS, FieldDataType.COMBOBOX]:
            return {'type': ['string', 'null']}
        elif field_type == FieldDataType.DATE:
            return {'type': ['string', 'null'], 'format': 'date'}
        elif field_type == FieldDataType.NUMBER:
            return {'type': ['number', 'null']}
        elif field_type == FieldDataType.BOOLEAN:
            return {'type': ['boolean', 'null']}
        elif field_type == FieldDataType.ENUM:
            return {'type': ['string', 'null'], 'enum': [c.value for c in definition.choices] + [None]}
        elif field_type == FieldDataType.USER:
            return {'type': ['string', 'null'], 'format': 'uuid'}
        elif field_type == FieldDataType.OBJECT:
            return self.compile_object(definition.properties)
        elif field_type == FieldDataType.LIST:
            return {'type': 'array', 'items': self.compile_field(definition.items)}
        else:
            raise ValueError(f'Encountered invalid type in field definition: "{field_type}"')

    def compile_definition_to_schema(self, field_definitions):
        """Build a reusable JSON schema validator for the given definitions."""
        return jsonschema.Draft202012Validator(schema={
            # NOTE(review): "$schema" declares draft 2019-09 while the validator class
            # implements draft 2020-12; the explicitly chosen class takes precedence,
            # but the declared dialect looks inconsistent — confirm intent.
            "$schema": "https://json-schema.org/draft/2019-09/schema",
            **self.compile_object(field_definitions),
        })

    def __call__(self, value):
        """Validate ``value`` against the compiled schema; raises ValidationError on mismatch."""
        try:
            self.schema.validate(value)
        except jsonschema.ValidationError as ex:
            raise ValidationError('Data does not match field definition') from ex
|
||||
|
||||
|
||||
@deconstructible
class SectionDefinitionValidator:
    """
    Validates a report section definition: JSON schema conformance, unique
    section IDs, and no field assigned to more than one section.
    """

    def __call__(self, value):
        try:
            get_section_definition_schema().validate(value)
        except jsonschema.ValidationError as ex:
            raise ValidationError('Invalid section definition') from ex

        # Section IDs must be unique across the whole definition.
        section_ids = [section['id'] for section in value]
        if len(set(section_ids)) != len(section_ids):
            raise ValidationError('Invalid section definition: Duplicate section IDs')

        # Each field may be referenced by at most one section.
        section_fields = list(itertools.chain.from_iterable(section['fields'] for section in value))
        if len(set(section_fields)) != len(section_fields):
            raise ValidationError('Invalid section definition: Field in multiple sections')
|
||||
|
|
@ -0,0 +1,309 @@
|
|||
import math
|
||||
import re
|
||||
from typing import Any, Union
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.db import models
|
||||
|
||||
|
||||
class CVSSLevel(models.TextChoices):
    # Qualitative severity levels derived from a CVSS score;
    # see level_from_score() for the score-to-level mapping.
    INFO = 'info', _('Info')
    LOW = 'low', _('Low')
    MEDIUM = 'medium', _('Medium')
    HIGH = 'high', _('High')
    CRITICAL = 'critical', _('Critical')
|
||||
|
||||
|
||||
# CVSS v3.x vector format: "CVSS:3.0" or "CVSS:3.1" followed by "/metric:value" pairs.
CVSS3_REGEX = re.compile(r'^CVSS:3.[01](/[A-Za-z]+:[A-Za-z])+$')
# Numeric weights of the CVSS v3.x base metrics.
# 'PR' is nested because its weight depends on the Scope value ('U'/'C');
# 'S' maps to the scope letter itself instead of a number.
CVSS3_METRICS_BASE = {
    'AV': {'N': 0.85, 'A': 0.62, 'L': 0.55, 'P': 0.2},
    'AC': {'L': 0.77, 'H': 0.44},
    'PR': {'N': {'U': 0.85, 'C': 0.85}, 'L': {'U': 0.62, 'C': 0.68}, 'H': {'U': 0.27, 'C': 0.5}},
    'UI': {'N': 0.85, 'R': 0.62},
    'S': {'U': 'U', 'C': 'C'},
    'C': {'N': 0, 'L': 0.22, 'H': 0.56},
    'I': {'N': 0, 'L': 0.22, 'H': 0.56},
    'A': {'N': 0, 'L': 0.22, 'H': 0.56},
}
# Temporal metric weights; 'X' (Not Defined) is neutral (weight 1).
CVSS3_METRICS_TEMPORAL = {
    'E': {'X': 1, 'H': 1, 'F': 0.97, 'P': 0.94, 'U': 0.91},
    'RL': {'X': 1, 'U': 1, 'W': 0.97, 'T': 0.96, 'O': 0.95},
    'RC': {'X': 1, 'C': 1, 'R': 0.96, 'U': 0.92},
}
# Environmental metric weights. Modified metrics ('M*') map 'X' to None so
# that metric lookups fall back to the corresponding unmodified base metric.
CVSS3_METRICS_ENVIRONMENTAL = {
    'CR': {'X': 1, 'L': 0.5, 'M': 1, 'H': 1.5},
    'IR': {'X': 1, 'L': 0.5, 'M': 1, 'H': 1.5},
    'AR': {'X': 1, 'L': 0.5, 'M': 1, 'H': 1.5},
    'MAV': {'X': None, 'N': 0.85, 'A': 0.62, 'L': 0.55, 'P': 0.2},
    'MAC': {'X': None, 'L': 0.77, 'H': 0.44},
    'MPR': {'X': None, 'N': {'U': 0.85, 'C': 0.85}, 'L': {'U': 0.62, 'C': 0.68}, 'H': {'U': 0.27, 'C': 0.5}},
    'MUI': {'X': None, 'N': 0.85, 'R': 0.62},
    'MS': {'X': None, 'U': 'U', 'C': 'C'},
    'MC': {'X': None, 'N': 0, 'L': 0.22, 'H': 0.56},
    'MI': {'X': None, 'N': 0, 'L': 0.22, 'H': 0.56},
    'MA': {'X': None, 'N': 0, 'L': 0.22, 'H': 0.56},
}
# Combined lookup table over all CVSS v3.x metric groups.
CVSS3_METRICS = CVSS3_METRICS_BASE | CVSS3_METRICS_TEMPORAL | CVSS3_METRICS_ENVIRONMENTAL

# CVSS v2 vectors are bare "metric:value" pairs separated by "/" (no version prefix).
CVSS2_REGEX = re.compile(r'(/?[A-Za-z]+:[A-Z]+)+')
# CVSS v2 metric weights; 'ND' (Not Defined) is the neutral default for
# temporal and environmental metrics.
CVSS2_METRICS = {
    # base metrics
    'AV': {'L': 0.395, 'A': 0.646, 'N': 1.0},
    'AC': {'H': 0.35, 'M': 0.61, 'L': 0.71},
    'Au': {'M': 0.45, 'S': 0.56, 'N': 0.71},
    'C': {'N': 0, 'P': 0.275, 'C': 0.660},
    'I': {'N': 0, 'P': 0.275, 'C': 0.660},
    'A': {'N': 0, 'P': 0.275, 'C': 0.660},

    # temporal metrics
    'E': {'ND': 1, 'H': 1, 'F': 0.95, 'P': 0.90, 'U': 0.85},
    'RL': {'ND': 1, 'U': 1, 'W': 0.95, 'TF': 0.90, 'OF': 0.87},
    'RC': {'ND': 1, 'C': 1, 'UR': 0.95, 'UC': 0.90},

    # environmental metrics
    'CDP': {'ND': 0, 'N': 0, 'L': 0.1, 'LM': 0.3, 'MH': 0.4, 'H': 0.5},
    'TD': {'ND': 1, 'N': 0, 'L': 0.25, 'M': 0.75, 'H': 1},
    'CR': {'ND': 1, 'L': 0.5, 'M': 1, 'H': 1.51},
    'IR': {'ND': 1, 'L': 0.5, 'M': 1, 'H': 1.51},
    'AR': {'ND': 1, 'L': 0.5, 'M': 1, 'H': 1.51},
}
# Base metrics that must be present in every valid CVSS v2 vector.
CVSS2_REQUIRED_METRICS = ['AV', 'AC', 'Au', 'C', 'I', 'A']
|
||||
|
||||
|
||||
def parse_cvss3(vector, version='3.0'):
    """
    Parse a CVSS:3.0 or CVSS:3.1 vector into a dict of metric -> value letter.

    For CVSS 3.0 and 3.1 the metrics are the same; only descriptions and
    score formulas changed between the versions.

    Raises:
        ValidationError: if the vector is malformed, contains unknown
            metrics/values, or is missing a required base metric.
    """
    if not vector or not CVSS3_REGEX.match(vector) or not vector.startswith('CVSS:' + version):
        raise ValidationError(f'Invalid CVSS:{version} vector: Invalid format')

    # Strip the "CVSS:3.x" prefix (8 characters) and split into metric:value pairs.
    values = dict(map(lambda p: tuple(p.split(':')), filter(None, vector[8:].split('/'))))
    for k, v in values.items():
        if k not in CVSS3_METRICS or v not in CVSS3_METRICS[k]:
            raise ValidationError(f'Invalid CVSS:{version} vector: invalid metric value "{k}:{v}"')

    # All base metrics are mandatory; temporal/environmental metrics are optional.
    for m in CVSS3_METRICS_BASE.keys():
        if m not in values:
            # Fixed inconsistency: the message previously read "Invalid CVSS{version}"
            # (missing colon) unlike the other error messages in this function.
            raise ValidationError(f'Invalid CVSS:{version} vector: base metric "{m}" missing')

    return values
|
||||
|
||||
|
||||
def is_cvss3_0(vector):
    """Return True if `vector` is a valid CVSS:3.0 vector."""
    try:
        parse_cvss3(vector, version='3.0')
    except ValidationError:
        return False
    return True
|
||||
|
||||
|
||||
def is_cvss3_1(vector):
    """Return True if `vector` is a valid CVSS:3.1 vector."""
    try:
        parse_cvss3(vector, version='3.1')
    except ValidationError:
        return False
    return True
|
||||
|
||||
|
||||
def round_up(input):
    """
    CVSS v3.1 "Roundup" helper (specification Appendix A).

    Rounds up to one decimal place while avoiding floating point
    representation errors by operating on an integer scaled by 10^5.
    """
    scaled = round(input * 100000)
    if scaled % 10000:
        return (math.floor(scaled / 10000) + 1) / 10.0
    return scaled / 100000.0
|
||||
|
||||
|
||||
def calculate_score_cvss3_0(vector) -> Union[float, None]:
    """
    Calculate the CVSS v3.0 score for `vector`.

    Always evaluates the environmental score formula; because undefined
    temporal/environmental metrics default to neutral weights, the result
    degrades to the temporal resp. base score when those metric groups are
    absent. Returns None if the vector is not a valid CVSS:3.0 vector.
    """
    try:
        values = parse_cvss3(vector, version='3.0')
    except ValidationError:
        return None

    def metric(name, modified=False) -> Any:
        # Lookup order: modified metric ("M" + name), then the original
        # metric, then the metric's "X" (Not Defined) default weight.
        if modified:
            m = CVSS3_METRICS.get('M' + name, {}).get(values.get('M' + name))
            if m is not None and m != 'X':
                return m
        m = CVSS3_METRICS.get(name, {}).get(values.get(name))
        if m is not None:
            return m
        return CVSS3_METRICS.get(name, {}).get('X')

    # Environmental Score calculation (this is the final score shown to the user)
    scope_changed = metric('S', modified=True) == 'C'
    # Impact sub-score, capped at 0.915 per the specification.
    isc = min(1 - (
        (1 - metric('C', modified=True) * metric('CR')) *
        (1 - metric('I', modified=True) * metric('IR')) *
        (1 - metric('A', modified=True) * metric('AR'))
    ), 0.915)
    impact = 7.52 * (isc - 0.029) - 3.25 * pow(isc - 0.02, 15) if scope_changed else \
        6.42 * isc
    # The 'PR' weight depends on Scope, hence the nested dict lookup.
    exploitability = 8.22 * metric('AV', modified=True) * metric('AC', modified=True) * metric('PR', modified=True)[metric('S', modified=True)] * metric('UI', modified=True)
    score = 0.0 if impact <= 0 else (
        round_up(min(1.08 * (impact + exploitability), 10)) if scope_changed else
        round_up(min(impact + exploitability, 10))
    )
    # Apply temporal metrics (all weights default to 1 when not defined).
    score = round_up(score * metric('E') * metric('RL') * metric('RC'))
    return score
|
||||
|
||||
|
||||
def calculate_score_cvss3_1(vector) -> Union[float, None]:
    """
    Calculate the CVSS v3.1 score for `vector`.

    Unlike the v3.0 implementation, this checks which metric groups are
    actually present: if any environmental metric is given, the environmental
    score is returned; otherwise the base score, adjusted by temporal metrics
    when those are present. Returns None for invalid vectors.
    """
    try:
        values = parse_cvss3(vector, version='3.1')
    except ValidationError:
        return None

    def has_metric_group(group):
        # A group counts as "used" if any of its metrics has a value other
        # than 'X' (Not Defined).
        return any(map(lambda m: m in values and values[m] != 'X', group.keys()))

    def metric(name, modified=False) -> Any:
        # Lookup order: modified metric ("M" + name), then the original
        # metric, then the metric's "X" (Not Defined) default weight.
        if modified:
            m = CVSS3_METRICS.get('M' + name, {}).get(values.get('M' + name))
            if m is not None and m != 'X':
                return m
        m = CVSS3_METRICS.get(name, {}).get(values.get(name))
        if m is not None:
            return m
        return CVSS3_METRICS.get(name, {}).get('X')

    # Environmental score
    if has_metric_group(CVSS3_METRICS_ENVIRONMENTAL):
        m_scope_changed = metric('S', modified=True) == 'C'
        # Modified impact sub-score, capped at 0.915 per the specification.
        miss = min(1 - (
            (1 - metric('C', modified=True) * metric('CR')) *
            (1 - metric('I', modified=True) * metric('IR')) *
            (1 - metric('A', modified=True) * metric('AR'))
        ), 0.915)
        # v3.1 changed the modified-impact formula for changed scope
        # (factor 0.9731 and exponent 13 instead of 15).
        m_impact = 7.52 * (miss - 0.029) - 3.25 * pow(miss * 0.9731 - 0.02, 13) if m_scope_changed else \
            6.42 * miss
        m_exploitability = 8.22 * metric('AV', modified=True) * metric('AC', modified=True) * metric('PR', modified=True)[metric('S', modified=True)] * metric('UI', modified=True)
        env_score = 0.0 if m_impact <= 0 else (
            round_up(round_up(min(1.08 * (m_impact + m_exploitability), 10)) * metric('E') * metric('RL') * metric('RC')) if m_scope_changed else
            round_up(round_up(min(m_impact + m_exploitability, 10)) * metric('E') * metric('RL') * metric('RC'))
        )
        return env_score

    # Base score
    scope_changed = metric('S') == 'C'
    iss = 1 - ((1 - metric('C')) * (1 - metric('I')) * (1 - metric('A')))
    impact = (7.52 * (iss - 0.029) - 3.25 * pow(iss - 0.02, 15)) if scope_changed else \
        6.42 * iss
    exploitability = 8.22 * metric('AV') * metric('AC') * metric('PR')[metric('S')] * metric('UI')
    score = 0.0 if impact <= 0 else (
        round_up(min(1.08 * (impact + exploitability), 10)) if scope_changed else
        round_up(min(impact + exploitability, 10))
    )

    # Temporal score (only applied when temporal metrics are present)
    if has_metric_group(CVSS3_METRICS_TEMPORAL):
        score = round_up(score * metric('E') * metric('RL') * metric('RC'))
    return score
|
||||
|
||||
|
||||
def parse_cvss2(vector):
    """
    Parse a CVSS v2 vector into a dict of metric -> value.

    Accepts vectors with or without the non-standardized "CVSS2#" prefix.

    Raises:
        ValidationError: if the vector is malformed, contains unknown
            metric values, or is missing a required base metric.
    """
    # Strip non-standardized prefix
    vector = (vector or '').replace('CVSS2#', '')

    if not vector or not CVSS2_REGEX.match(vector):
        raise ValidationError('Invalid CVSS2 vector: Invalid format')

    # Split into metric:value pairs, ignoring empty segments.
    values = dict(tuple(part.split(':')) for part in vector.split('/') if part)
    for metric_name, metric_value in values.items():
        if metric_name not in CVSS2_METRICS or metric_value not in CVSS2_METRICS[metric_name]:
            raise ValidationError(f'Invalid CVSS2 vector: invalid metric value "{metric_name}:{metric_value}"')

    # All base metrics must be present; temporal/environmental ones are optional.
    for required in CVSS2_REQUIRED_METRICS:
        if required not in values:
            raise ValidationError(f'Invalid CVSS2 vector: base metric "{required}" missing')

    return values
|
||||
|
||||
|
||||
def is_cvss2(vector):
    """Return True if `vector` is a valid CVSS v2 vector."""
    try:
        parse_cvss2(vector)
    except ValidationError:
        return False
    return True
|
||||
|
||||
|
||||
def calculate_score_cvss2(vector) -> Union[float, None]:
    """
    Calculate the CVSS v2 environmental score for `vector`.

    Undefined temporal/environmental metrics fall back to their "ND"
    (Not Defined) neutral weights, so the result degrades to the temporal
    resp. base score. Returns None for invalid vectors.
    """
    try:
        values = parse_cvss2(vector)
    except ValidationError:
        return None

    def metric(name):
        # Metric weight from the vector, or the metric's "ND" default.
        m = CVSS2_METRICS.get(name, {}).get(values.get(name))
        if m is not None:
            return m
        return CVSS2_METRICS.get(name, {}).get('ND')

    def round_up(inp):
        # CVSS v2 rounds to one decimal place; this intentionally shadows
        # the module-level CVSS v3.1 round_up helper within this function.
        return round(inp, ndigits=1)

    # Environmental Score calculation (this is the final score shown to the user)
    adjusted_impact = min(10.41 * (1 - (
        (1 - metric('C') * metric('CR')) *
        (1 - metric('I') * metric('IR')) *
        (1 - metric('A') * metric('AR')))
    ), 10)
    exploitability = 20 * metric('AV') * metric('AC') * metric('Au')
    adjusted_base_score = round_up(
        ((0.6 * adjusted_impact) + (0.4 * exploitability) - 1.5) *
        (0 if adjusted_impact == 0 else 1.176))
    adjusted_temporal = round_up(adjusted_base_score * metric('E') * metric('RL') * metric('RC'))
    environmental_score = round_up((adjusted_temporal + (10 - adjusted_temporal) * metric('CDP')) * metric('TD'))
    return environmental_score
|
||||
|
||||
|
||||
def is_cvss(vector):
    """Return True if `vector` is a valid CVSS v3.1, v3.0 or v2 vector."""
    return any(check(vector) for check in (is_cvss3_1, is_cvss3_0, is_cvss2))
|
||||
|
||||
|
||||
def calculate_score(vector) -> float:
    """
    Calculate the CVSS score from a CVSS vector.

    Tries CVSS v3.1 first, then v3.0, then v2; returns 0.0 when the vector
    matches none of the supported formats.
    """
    for calculator in (calculate_score_cvss3_1, calculate_score_cvss3_0, calculate_score_cvss2):
        score = calculator(vector)
        if score is not None:
            return score
    return 0.0
|
||||
|
||||
|
||||
def level_from_score(score: float) -> CVSSLevel:
    """
    Map a CVSS score to its qualitative severity level.
    """
    # Thresholds follow the CVSS qualitative severity rating scale.
    for threshold, level in [(9.0, CVSSLevel.CRITICAL), (7.0, CVSSLevel.HIGH), (4.0, CVSSLevel.MEDIUM)]:
        if score >= threshold:
            return level
    return CVSSLevel.LOW if score > 0 else CVSSLevel.INFO
|
||||
|
||||
|
||||
def level_number_from_score(score: float) -> int:
    """
    Map a CVSS score to a numeric severity level from 1 (info) to 5 (critical).
    """
    for threshold, level in [(9.0, 5), (7.0, 4), (4.0, 3)]:
        if score >= threshold:
            return level
    return 2 if score > 0 else 1
|
|
@ -0,0 +1,122 @@
|
|||
# Generated by Django 4.0.4 on 2022-07-08 11:08
|
||||
|
||||
import django.core.serializers.json
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import reportcreator_api.pentests.customfields.mixins
|
||||
import reportcreator_api.pentests.customfields.validators
|
||||
import reportcreator_api.pentests.customfields.predefined_fields
|
||||
import reportcreator_api.pentests.models
|
||||
import reportcreator_api.pentests.storages
|
||||
import reportcreator_api.utils.models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Initial schema for the pentests app: findings, projects, reports,
    # project types (field definitions + report templates) and uploads.
    # Relations between these models are added in 0002_initial.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='PentestFinding',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('title', models.TextField(default='')),
                ('cvss', models.CharField(default='n/a', max_length=50)),
                ('risk_score', models.FloatField(default=0.0)),
                ('risk_level', models.CharField(choices=[('none', 'None'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], default='none', max_length=10)),
                ('custom_fields', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)),
            ],
            options={
                'ordering': ['-created'],
                'abstract': False,
            },
            bases=(reportcreator_api.pentests.customfields.mixins.CustomFieldsMixin, models.Model, reportcreator_api.utils.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='PentestProject',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'ordering': ['-created'],
                'abstract': False,
            },
            bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='PentestReport',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('title', models.TextField(default='')),
                ('custom_fields', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)),
            ],
            options={
                'ordering': ['-created'],
                'abstract': False,
            },
            bases=(reportcreator_api.pentests.customfields.mixins.CustomFieldsMixin, models.Model, reportcreator_api.utils.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='ProjectType',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=255)),
                ('report_template', models.TextField(default='')),
                ('report_styles', models.TextField(default='')),
                ('report_preview_data', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)),
                # Field definitions are validated on write; core fields like "title"/"cvss" must keep their structure.
                ('report_fields', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.report_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator({'title': {'static': True, 'type': 'string'}})])),
                ('report_sections', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.report_sections_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.SectionDefinitionValidator()])),
                ('finding_fields', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.finding_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator({'cvss': {'static': True, 'type': 'cvss'}, 'title': {'static': True, 'type': 'string'}})])),
                ('finding_field_order', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.finding_field_order_default, encoder=django.core.serializers.json.DjangoJSONEncoder)),
            ],
            options={
                'ordering': ['-created'],
                'abstract': False,
            },
            bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='UploadedImage',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('image', models.ImageField(storage=reportcreator_api.pentests.storages.get_uploaded_image_storage, upload_to='')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.pentestproject')),
            ],
            options={
                'ordering': ['-created'],
                'abstract': False,
            },
            bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='UploadedAsset',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=255)),
                ('file', models.FileField(storage=reportcreator_api.pentests.storages.get_uploaded_asset_storage, upload_to='')),
                ('project_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.projecttype')),
            ],
            options={
                'ordering': ['-created'],
                'abstract': False,
            },
            bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin),
        ),
    ]
|
|
@ -0,0 +1,38 @@
|
|||
# Generated by Django 4.0.4 on 2022-07-08 11:08
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Second half of the initial schema: wires up the relations between the
    # models created in 0001_initial (project members, project type,
    # project <-> report and report <-> finding links).

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('pentests', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='pentestproject',
            name='pentesters',
            # Only active users can be assigned to a project.
            field=models.ManyToManyField(limit_choices_to=models.Q(('is_active', True)), to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='pentestproject',
            name='project_type',
            # PROTECT: a design cannot be deleted while projects still use it.
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='pentests.projecttype'),
        ),
        migrations.AddField(
            model_name='pentestproject',
            name='report',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='project', to='pentests.pentestreport'),
        ),
        migrations.AddField(
            model_name='pentestfinding',
            name='report',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.pentestreport'),
        ),
    ]
|
|
@ -0,0 +1,99 @@
|
|||
# Generated by Django 4.0.4 on 2022-07-20 17:08
|
||||
|
||||
import django.contrib.postgres.fields
import django.core.serializers.json
from django.db import migrations, models
import reportcreator_api.pentests.customfields.mixins
import reportcreator_api.pentests.customfields.predefined_fields
import reportcreator_api.pentests.customfields.types
import reportcreator_api.pentests.customfields.utils
import reportcreator_api.pentests.customfields.validators
import reportcreator_api.utils.models
import uuid
|
||||
|
||||
|
||||
def migrate_field_definition_static_to_origin(apps, schema_editor):
    """
    Data migration: re-serialize all stored field definitions so that the
    legacy ``static`` flag is replaced by the new ``origin`` attribute,
    derived from the core/predefined field sets.
    """
    ProjectType = apps.get_model('pentests', 'projecttype')
    for pt in ProjectType.objects.all():
        # Load and serialize definition to update format to current structure
        pt.finding_fields = reportcreator_api.pentests.customfields.types.field_definition_to_dict(
            reportcreator_api.pentests.customfields.utils.set_field_origin(
                reportcreator_api.pentests.customfields.types.parse_field_definition(pt.finding_fields),
                reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_CORE | reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_PREDEFINED))
        pt.report_fields = reportcreator_api.pentests.customfields.types.field_definition_to_dict(
            reportcreator_api.pentests.customfields.utils.set_field_origin(
                reportcreator_api.pentests.customfields.types.parse_field_definition(pt.report_fields),
                reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_CORE | reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_PREDEFINED))
        pt.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Adds the FindingTemplate model, migrates field definitions from the
    # legacy "static" flag to the new "origin" attribute, and adds database
    # indexes to frequently filtered columns.

    dependencies = [
        ('pentests', '0002_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='FindingTemplate',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('usage_count', models.PositiveIntegerField(db_index=True, default=0)),
                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=255), db_index=True, default=list, size=None)),
                ('title', models.TextField(db_index=True, default='')),
                ('cvss', models.CharField(default='n/a', max_length=50)),
                ('custom_fields', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)),
            ],
            options={
                'ordering': ['-usage_count', '-created'],
            },
            bases=(reportcreator_api.pentests.customfields.mixins.CustomFieldsMixin, models.Model, reportcreator_api.utils.models.ModelDiffMixin),
        ),
        # Switch validators to the new core_fields/predefined_fields API.
        migrations.AlterField(
            model_name='projecttype',
            name='finding_fields',
            field=models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.finding_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator(core_fields=reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_CORE, predefined_fields=reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_PREDEFINED)]),
        ),
        migrations.AlterField(
            model_name='projecttype',
            name='report_fields',
            field=models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.report_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator(core_fields=reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_CORE, predefined_fields=reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_PREDEFINED)]),
        ),
        # Data migration: converts stored definitions to the new format.
        migrations.RunPython(code=migrate_field_definition_static_to_origin),
        migrations.AlterField(
            model_name='pentestfinding',
            name='risk_level',
            field=models.CharField(choices=[('none', 'None'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='none', max_length=10),
        ),
        migrations.AlterField(
            model_name='pentestfinding',
            name='risk_score',
            field=models.FloatField(db_index=True, default=0.0),
        ),
        migrations.AlterField(
            model_name='pentestfinding',
            name='title',
            field=models.TextField(db_index=True, default=''),
        ),
        migrations.AlterField(
            model_name='pentestproject',
            name='name',
            field=models.CharField(db_index=True, max_length=255),
        ),
        migrations.AlterField(
            model_name='pentestreport',
            name='title',
            field=models.TextField(db_index=True, default=''),
        ),
        migrations.AlterField(
            model_name='projecttype',
            name='name',
            field=models.CharField(db_index=True, max_length=255),
        ),
        migrations.AlterField(
            model_name='uploadedasset',
            name='name',
            field=models.CharField(db_index=True, max_length=255),
        ),
    ]
|
|
@ -0,0 +1,17 @@
|
|||
# Generated by Django 4.0.4 on 2022-07-21 14:32
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0003_findingtemplate_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='uploadedasset',
|
||||
unique_together={('project_type', 'name')},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,66 @@
|
|||
# Generated by Django 4.0.4 on 2022-08-13 12:27
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import reportcreator_api.pentests.models
|
||||
import reportcreator_api.utils.models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('contenttypes', '0002_remove_content_type_name'),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('pentests', '0004_alter_uploadedasset_unique_together'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ReportSection',
|
||||
fields=[
|
||||
('id', models.CharField(max_length=255)),
|
||||
('virtual_pk', models.UUIDField(primary_key=True, serialize=False)),
|
||||
],
|
||||
options={
|
||||
'managed': False,
|
||||
},
|
||||
bases=(reportcreator_api.pentests.models.LockableMixin, models.Model),
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='findingtemplate',
|
||||
options={'ordering': ['-usage_count', '-risk_score', '-created']},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='pentestfinding',
|
||||
options={'ordering': ['-risk_score', '-created']},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='findingtemplate',
|
||||
name='risk_level',
|
||||
field=models.CharField(choices=[('none', 'None'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='none', max_length=10),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='findingtemplate',
|
||||
name='risk_score',
|
||||
field=models.FloatField(db_index=True, default=0.0),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='LockInfo',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('updated', models.DateTimeField(auto_now=True)),
|
||||
('object_id', models.UUIDField()),
|
||||
('last_ping', models.DateTimeField(default=django.utils.timezone.now)),
|
||||
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'unique_together': {('content_type', 'object_id')},
|
||||
},
|
||||
bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,66 @@
|
|||
# Generated by Django 4.0.4 on 2022-08-22 09:18
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
def generate_uploadedimage_name(apps, schema_editor):
|
||||
UploadedImage = apps.get_model('pentests', 'UploadedImage')
|
||||
imgs = list(UploadedImage.objects.all())
|
||||
for img in imgs:
|
||||
img.name = img.file.name
|
||||
UploadedImage.objects.bulk_update(imgs, fields=['name'])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('pentests', '0005_reportsection_alter_findingtemplate_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='uploadedasset',
|
||||
old_name='project_type',
|
||||
new_name='linked_object',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='uploadedasset',
|
||||
name='uploaded_by',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='uploadedasset',
|
||||
unique_together={('linked_object', 'name')},
|
||||
),
|
||||
|
||||
migrations.RenameField(
|
||||
model_name='uploadedimage',
|
||||
old_name='image',
|
||||
new_name='file',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='uploadedimage',
|
||||
old_name='project',
|
||||
new_name='linked_object',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='uploadedimage',
|
||||
name='uploaded_by',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='uploadedimage',
|
||||
name='name',
|
||||
field=models.CharField(db_index=True, default='', max_length=255),
|
||||
),
|
||||
migrations.RunPython(
|
||||
code=generate_uploadedimage_name, reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='uploadedimage',
|
||||
unique_together={('linked_object', 'name')},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,22 @@
|
|||
# Generated by Django 4.0.7 on 2022-08-24 13:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0006_rename_project_type_uploadedasset_linked_object_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='findingtemplate',
|
||||
options={},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='uploadedimage',
|
||||
name='name',
|
||||
field=models.CharField(db_index=True, max_length=255),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 4.0.7 on 2022-08-26 13:20
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0007_alter_findingtemplate_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='projecttype',
|
||||
options={},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='uploadedasset',
|
||||
name='linked_object',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assets', to='pentests.projecttype'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='uploadedimage',
|
||||
name='linked_object',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='pentests.pentestproject'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,19 @@
|
|||
# Generated by Django 4.0.7 on 2022-08-29 08:22
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0008_alter_projecttype_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='pentestproject',
|
||||
name='report',
|
||||
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='project', to='pentests.pentestreport'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,32 @@
|
|||
# Generated by Django 4.0.7 on 2022-08-29 14:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0009_alter_pentestproject_report'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='pentestproject',
|
||||
options={},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='findingtemplate',
|
||||
name='language',
|
||||
field=models.CharField(choices=[('en-US', 'English'), ('de-DE', 'German')], db_index=True, default='de-DE', max_length=5),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pentestproject',
|
||||
name='language',
|
||||
field=models.CharField(choices=[('en-US', 'English'), ('de-DE', 'German')], db_index=True, default='de-DE', max_length=5),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projecttype',
|
||||
name='language',
|
||||
field=models.CharField(choices=[('en-US', 'English'), ('de-DE', 'German')], db_index=True, default='de-DE', max_length=5),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,45 @@
|
|||
# Generated by Django 4.0.7 on 2022-08-31 07:44
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from reportcreator_api.pentests.models import ReportSection
|
||||
import reportcreator_api.utils.models
|
||||
import uuid
|
||||
|
||||
|
||||
def migrate_create_sections(apps, schema_editor):
|
||||
ReportSection = apps.get_model('pentests', 'ReportSection')
|
||||
PentestProject = apps.get_model('pentests', 'PentestProject')
|
||||
|
||||
sections = []
|
||||
for p in PentestProject.objects.select_related('report', 'project_type').all():
|
||||
sections.extend([ReportSection(report=p.report, section_id=s.get('id')) for s in p.project_type.report_sections])
|
||||
ReportSection.objects.bulk_create(sections)
|
||||
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0010_alter_pentestproject_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ReportSection',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('updated', models.DateTimeField(auto_now=True)),
|
||||
('section_id', models.CharField(db_index=True, max_length=255)),
|
||||
('report', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.pentestreport')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['-created'],
|
||||
'abstract': False,
|
||||
'unique_together': {('report', 'section_id')},
|
||||
},
|
||||
bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model),
|
||||
),
|
||||
migrations.RunPython(code=migrate_create_sections, reverse_code=migrations.RunPython.noop),
|
||||
]
|
|
@ -0,0 +1,26 @@
|
|||
# Generated by Django 4.0.7 on 2022-08-31 08:46
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('pentests', '0011_reportsection'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='pentestfinding',
|
||||
name='assignee',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='reportsection',
|
||||
name='assignee',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,52 @@
|
|||
# Generated by Django 4.0.7 on 2022-09-20 09:55
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
def rename_risklevel_none_to_info(apps, schema_editor):
|
||||
PentestFinding = apps.get_model('pentests', 'PentestFinding')
|
||||
FindingTemplate = apps.get_model('pentests', 'FindingTemplate')
|
||||
|
||||
PentestFinding.objects \
|
||||
.filter(risk_level='none') \
|
||||
.update(risk_level='info')
|
||||
FindingTemplate.objects \
|
||||
.filter(risk_level='none') \
|
||||
.update(risk_level='info')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0012_pentestfinding_assignee_reportsection_assignee'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='pentestfinding',
|
||||
name='template',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='pentests.findingtemplate'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='findingtemplate',
|
||||
name='risk_level',
|
||||
field=models.CharField(choices=[('info', 'Info'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='info', max_length=10),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestfinding',
|
||||
name='report',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='findings', to='pentests.pentestreport'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestfinding',
|
||||
name='risk_level',
|
||||
field=models.CharField(choices=[('info', 'Info'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='info', max_length=10),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='reportsection',
|
||||
name='report',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='pentests.pentestreport'),
|
||||
),
|
||||
migrations.RunPython(code=rename_risklevel_none_to_info),
|
||||
]
|
|
@ -0,0 +1,25 @@
|
|||
# Generated by Django 4.0.7 on 2022-09-22 13:37
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from reportcreator_api.pentests.customfields.types import field_definition_to_dict, parse_field_definition
|
||||
|
||||
|
||||
def migrate_add_required_to_field_definitions(apps, schema_editor):
|
||||
ProjectType = apps.get_model('pentests', 'ProjectType')
|
||||
project_types = list(ProjectType.objects.all())
|
||||
for project_type in project_types:
|
||||
project_type.finding_fields = field_definition_to_dict(parse_field_definition(project_type.finding_fields))
|
||||
project_type.report_fields = field_definition_to_dict(parse_field_definition(project_type.report_fields))
|
||||
ProjectType.objects.bulk_update(project_types, fields=['finding_fields', 'report_fields'])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0013_pentestfinding_template_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(code=migrate_add_required_to_field_definitions, reverse_code=migrations.RunPython.noop),
|
||||
]
|
|
@ -0,0 +1,23 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-18 10:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0014_auto_20220922_1337'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='findingtemplate',
|
||||
name='cvss',
|
||||
field=models.CharField(default='n/a', max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestfinding',
|
||||
name='cvss',
|
||||
field=models.CharField(default='n/a', max_length=255),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,117 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-13 08:10
|
||||
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
import reportcreator_api.utils.models
|
||||
|
||||
|
||||
def migrate_set_finding_ids(apps, schema_editor):
|
||||
PentestFinding = apps.get_model('pentests', 'PentestFinding')
|
||||
PentestFinding.objects.update(finding_id=models.F('id'))
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0015_alter_findingtemplate_cvss_alter_pentestfinding_cvss'),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='pentestreport',
|
||||
options={},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pentestproject',
|
||||
name='imported_pentesters',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder), default=list, size=None),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='findingtemplate',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='lockinfo',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestfinding',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestproject',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestreport',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projecttype',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='reportsection',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='uploadedasset',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='uploadedimage',
|
||||
name='created',
|
||||
field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='findingtemplate',
|
||||
name='imported',
|
||||
field=models.BooleanField(db_index=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pentestproject',
|
||||
name='imported',
|
||||
field=models.BooleanField(db_index=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projecttype',
|
||||
name='imported',
|
||||
field=models.BooleanField(db_index=True, default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestproject',
|
||||
name='pentesters',
|
||||
field=models.ManyToManyField(related_name='projects', to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projecttype',
|
||||
name='hidden',
|
||||
field=models.BooleanField(db_index=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pentestfinding',
|
||||
name='finding_id',
|
||||
field=models.UUIDField(db_index=True, default=uuid.uuid4, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='reportsection',
|
||||
name='section_id',
|
||||
field=models.CharField(db_index=True, editable=False, max_length=255),
|
||||
),
|
||||
migrations.RunPython(code=migrate_set_finding_ids),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='pentestfinding',
|
||||
unique_together={('report', 'finding_id')},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-18 14:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0016_alter_pentestreport_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='pentestproject',
|
||||
name='readonly',
|
||||
field=models.BooleanField(db_index=True, default=False),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,69 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-23 08:47
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
from reportcreator_api.pentests.models import SourceEnum
|
||||
|
||||
|
||||
def migrate_to_source_enum(apps, schema_editor):
|
||||
ProjectType = apps.get_model('pentests', 'ProjectType')
|
||||
FindingTemplate = apps.get_model('pentests', 'FindingTemplate')
|
||||
PentestProject = apps.get_model('pentests', 'PentestProject')
|
||||
|
||||
FindingTemplate.objects \
|
||||
.filter(imported=True) \
|
||||
.update(source=SourceEnum.IMPORTED)
|
||||
PentestProject.objects \
|
||||
.filter(imported=True) \
|
||||
.update(source=SourceEnum.IMPORTED)
|
||||
ProjectType.objects \
|
||||
.filter(imported=True) \
|
||||
.filter(hidden=False) \
|
||||
.update(source=SourceEnum.IMPORTED)
|
||||
ProjectType.objects \
|
||||
.filter(imported=True) \
|
||||
.filter(hidden=True) \
|
||||
.update(source=SourceEnum.IMPORTED_DEPENDENCY)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0017_pentestproject_readonly'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='findingtemplate',
|
||||
name='source',
|
||||
field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized')], db_index=True, default='created', editable=False, max_length=50),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pentestproject',
|
||||
name='source',
|
||||
field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized')], db_index=True, default='created', editable=False, max_length=50),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projecttype',
|
||||
name='source',
|
||||
field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized')], db_index=True, default='created', editable=False, max_length=50),
|
||||
),
|
||||
migrations.RunPython(code=migrate_to_source_enum),
|
||||
migrations.RemoveField(
|
||||
model_name='findingtemplate',
|
||||
name='imported',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='pentestproject',
|
||||
name='imported',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='projecttype',
|
||||
name='hidden',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='projecttype',
|
||||
name='imported',
|
||||
),
|
||||
]
|
|
@ -0,0 +1,69 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-23 10:53
|
||||
|
||||
import django.core.serializers.json
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
def migrate_report_to_project(apps, schema_editor):
|
||||
PentestReport = apps.get_model('pentests', 'PentestReport')
|
||||
PentestProject = apps.get_model('pentests', 'PentestProject')
|
||||
ReportSection = apps.get_model('pentests', 'ReportSection')
|
||||
PentestFinding = apps.get_model('pentests', 'PentestFinding')
|
||||
|
||||
# Delete unreferenced reports (remainders of deleted projects)
|
||||
PentestReport.objects \
|
||||
.filter(project__isnull=True) \
|
||||
.delete()
|
||||
|
||||
projects = list(PentestProject.objects.select_related('report').all())
|
||||
for p in projects:
|
||||
p.custom_fields = {'title': p.report.title} | p.report.custom_fields
|
||||
PentestProject.objects.bulk_update(projects, ['custom_fields'])
|
||||
|
||||
sections = list(ReportSection.objects.select_related('report__project').all())
|
||||
for s in sections:
|
||||
s.project = s.report.project
|
||||
ReportSection.objects.bulk_update(sections, ['project'])
|
||||
|
||||
findings = list(PentestFinding.objects.select_related('report__project').all())
|
||||
for f in findings:
|
||||
f.project = f.report.project
|
||||
PentestFinding.objects.bulk_update(findings, ['project'])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0018_remove_findingtemplate_imported_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='pentestfinding',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='reportsection',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pentestfinding',
|
||||
name='project',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='findings', to='pentests.pentestproject'),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pentestproject',
|
||||
name='custom_fields',
|
||||
field=models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='reportsection',
|
||||
name='project',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='pentests.pentestproject'),
|
||||
preserve_default=False,
|
||||
),
|
||||
|
||||
migrations.RunPython(code=migrate_report_to_project),
|
||||
]
|
|
@ -0,0 +1,47 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-23 11:58
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0019_remove_report_1'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='pentestproject',
|
||||
name='report',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pentestfinding',
|
||||
name='project',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='findings', to='pentests.pentestproject'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='reportsection',
|
||||
name='project',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='pentests.pentestproject'),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='pentestfinding',
|
||||
unique_together={('project', 'finding_id')},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='reportsection',
|
||||
unique_together={('project', 'section_id')},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='pentestfinding',
|
||||
name='report',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='reportsection',
|
||||
name='report',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='PentestReport',
|
||||
),
|
||||
]
|
|
@ -0,0 +1,38 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-23 12:30
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
from reportcreator_api.pentests.models import SourceEnum
|
||||
|
||||
|
||||
def migrate_set_linked_project(apps, schema_editor):
|
||||
ProjectType = apps.get_model('pentests', 'ProjectType')
|
||||
linked_pts = list(ProjectType.objects \
|
||||
.filter(source=SourceEnum.IMPORTED_DEPENDENCY) \
|
||||
.annotate(project_count=models.Count('pentestproject')) \
|
||||
.filter(project_count=1) \
|
||||
.prefetch_related('pentestproject_set'))
|
||||
for pt in linked_pts:
|
||||
p = list(pt.pentestproject_set.all())[0]
|
||||
if p.source != SourceEnum.IMPORTED:
|
||||
continue
|
||||
pt.linked_project = p
|
||||
ProjectType.objects.bulk_update(linked_pts, ['linked_project'])
|
||||
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0020_remove_report_2'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='projecttype',
|
||||
name='linked_project',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='pentests.pentestproject'),
|
||||
),
|
||||
migrations.RunPython(code=migrate_set_linked_project, reverse_code=migrations.RunPython.noop),
|
||||
]
|
|
@ -0,0 +1,27 @@
|
|||
# Generated by Django 4.0.7 on 2022-10-24 13:29
|
||||
|
||||
from django.db import migrations
|
||||
from reportcreator_api.pentests.customfields.predefined_fields import FINDING_FIELDS_PREDEFINED
|
||||
from reportcreator_api.pentests.customfields.types import field_definition_to_dict
|
||||
|
||||
|
||||
def migrate_predefined_wstg(apps, schema_editor):
|
||||
"""
|
||||
Update predefined_field "wstg_category" in all ProjectTypes that use it
|
||||
"""
|
||||
ProjectType = apps.get_model('pentests', 'ProjectType')
|
||||
|
||||
for pt in ProjectType.objects.filter(finding_fields__wstg_category__isnull=False):
|
||||
pt.finding_fields['wstg_category'] = field_definition_to_dict(FINDING_FIELDS_PREDEFINED['wstg_category'])
|
||||
pt.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0021_projecttype_linked_project'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(code=migrate_predefined_wstg, reverse_code=migrations.RunPython.noop),
|
||||
]
|
|
@ -0,0 +1,53 @@
|
|||
# Generated by Django 4.1.3 on 2022-11-22 07:51
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
import django.core.serializers.json
|
||||
from django.db import migrations, models
|
||||
|
||||
from reportcreator_api.pentests.models import ReviewStatus
|
||||
|
||||
|
||||
def migrate_set_status(apps, schema_editor):
|
||||
PentestFinding = apps.get_model('pentests.PentestFinding')
|
||||
ReportSection = apps.get_model('pentests.ReportSection')
|
||||
FindingTemplate = apps.get_model('pentests.FindingTemplate')
|
||||
|
||||
PentestFinding.objects \
|
||||
.filter(project__readonly=True) \
|
||||
.update(status=ReviewStatus.FINISHED)
|
||||
ReportSection.objects \
|
||||
.filter(project__readonly=True) \
|
||||
.update(status=ReviewStatus.FINISHED)
|
||||
FindingTemplate.objects \
|
||||
.update(status=ReviewStatus.FINISHED)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pentests', '0022_auto_20221024_1329'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='pentestfinding',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('in-progress', 'In progress'), ('ready-for-review', 'Ready for review'), ('needs-improvement', 'Needs improvement'), ('finished', 'Finished')], db_index=True, default='in-progress', max_length=20),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='reportsection',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('in-progress', 'In progress'), ('ready-for-review', 'Ready for review'), ('needs-improvement', 'Needs improvement'), ('finished', 'Finished')], db_index=True, default='in-progress', max_length=20),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='findingtemplate',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('in-progress', 'In progress'), ('ready-for-review', 'Ready for review'), ('needs-improvement', 'Needs improvement'), ('finished', 'Finished')], db_index=True, default='in-progress', max_length=20),
|
||||
),
|
||||
migrations.RunPython(code=migrate_set_status, reverse_code=migrations.RunPython.noop),
|
||||
migrations.AlterField(
|
||||
model_name='pentestproject',
|
||||
name='imported_pentesters',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder), blank=True, default=list, size=None),
|
||||
),
|
||||
]
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue